diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 1f5f7e57dc4859..45a5f3ceb67b44 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -29,19 +29,19 @@ Objects/type* @markshannon Objects/codeobject.c @markshannon Objects/frameobject.c @markshannon Objects/call.c @markshannon -Python/ceval*.c @markshannon @gvanrossum -Python/ceval*.h @markshannon @gvanrossum +Python/ceval*.c @markshannon +Python/ceval*.h @markshannon Python/compile.c @markshannon @iritkatriel Python/assemble.c @markshannon @iritkatriel Python/flowgraph.c @markshannon @iritkatriel Python/ast_opt.c @isidentical -Python/bytecodes.c @markshannon @gvanrossum -Python/optimizer*.c @markshannon @gvanrossum +Python/bytecodes.c @markshannon +Python/optimizer*.c @markshannon Python/optimizer_analysis.c @Fidget-Spinner Python/optimizer_bytecodes.c @Fidget-Spinner Lib/test/test_patma.py @brandtbucher Lib/test/test_type_*.py @JelleZijlstra -Lib/test/test_capi/test_misc.py @markshannon @gvanrossum +Lib/test/test_capi/test_misc.py @markshannon Tools/c-analyzer/ @ericsnowcurrently # dbm @@ -150,7 +150,7 @@ Include/internal/pycore_time.h @pganssle @abalkin /Lib/test/test_tokenize.py @pablogsal @lysnikolaou # Code generator -/Tools/cases_generator/ @gvanrossum +/Tools/cases_generator/ @markshannon # AST Python/ast.c @isidentical diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 1b2677ff6fe889..04020de7c950c1 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -8,19 +8,11 @@ on: push: branches: - 'main' - - '3.12' - - '3.11' - - '3.10' - - '3.9' - - '3.8' + - '3.*' pull_request: branches: - 'main' - - '3.12' - - '3.11' - - '3.10' - - '3.9' - - '3.8' + - '3.*' permissions: contents: read @@ -207,8 +199,9 @@ jobs: uses: ./.github/workflows/reusable-macos.yml with: config_hash: ${{ needs.check_source.outputs.config_hash }} - # macos-14 is M1, macos-13 is Intel - os-matrix: '["macos-14", "macos-13"]' + # Cirrus and macos-14 are M1, macos-13 is default GHA Intel. + # Cirrus used for upstream, macos-14 for forks. + os-matrix: '["ghcr.io/cirruslabs/macos-runner:sonoma", "macos-14", "macos-13"]' build_macos_free_threading: name: 'macOS (free-threading)' @@ -218,8 +211,9 @@ jobs: with: config_hash: ${{ needs.check_source.outputs.config_hash }} free-threading: true - # macos-14-large is Intel with 12 cores (most parallelism) - os-matrix: '["macos-14"]' + # Cirrus and macos-14 are M1. + # Cirrus used for upstream, macos-14 for forks. 
+ os-matrix: '["ghcr.io/cirruslabs/macos-runner:sonoma", "macos-14"]' build_ubuntu: name: 'Ubuntu' @@ -494,6 +488,7 @@ jobs: config_hash: ${{ needs.check_source.outputs.config_hash }} options: ./configure --config-cache --with-thread-sanitizer --with-pydebug suppressions_path: Tools/tsan/supressions.txt + tsan_logs_artifact_name: tsan-logs-default build_tsan_free_threading: name: 'Thread sanitizer (free-threading)' @@ -504,6 +499,7 @@ jobs: config_hash: ${{ needs.check_source.outputs.config_hash }} options: ./configure --config-cache --disable-gil --with-thread-sanitizer --with-pydebug suppressions_path: Tools/tsan/suppressions_free_threading.txt + tsan_logs_artifact_name: tsan-logs-free-threading # CIFuzz job based on https://google.github.io/oss-fuzz/getting-started/continuous-integration/ cifuzz: diff --git a/.github/workflows/jit.yml b/.github/workflows/jit.yml index 7152cde8f4607c..8c760a81d52662 100644 --- a/.github/workflows/jit.yml +++ b/.github/workflows/jit.yml @@ -5,11 +5,17 @@ on: - '**jit**' - 'Python/bytecodes.c' - 'Python/optimizer*.c' + - '!Python/perf_jit_trampoline.c' + - '!**/*.md' + - '!**/*.ini' push: paths: - '**jit**' - 'Python/bytecodes.c' - 'Python/optimizer*.c' + - '!Python/perf_jit_trampoline.c' + - '!**/*.md' + - '!**/*.ini' workflow_dispatch: permissions: @@ -20,8 +26,22 @@ concurrency: cancel-in-progress: true jobs: + interpreter: + name: Interpreter (Debug) + runs-on: ubuntu-latest + timeout-minutes: 90 + steps: + - uses: actions/checkout@v4 + - name: Build tier two interpreter + run: | + ./configure --enable-experimental-jit=interpreter --with-pydebug + make all --jobs 4 + - name: Test tier two interpreter + run: | + ./python -m test --multiprocess 0 --timeout 4500 --verbose2 --verbose3 jit: name: ${{ matrix.target }} (${{ matrix.debug && 'Debug' || 'Release' }}) + needs: interpreter runs-on: ${{ matrix.runner }} timeout-minutes: 90 strategy: @@ -144,3 +164,22 @@ jobs: ./configure --enable-experimental-jit ${{ matrix.debug && '--with-pydebug' || '--enable-optimizations ' }} --build=x86_64-linux-gnu --host="$HOST" --with-build-python=../build/bin/python3 --with-pkg-config=no ac_cv_buggy_getaddrinfo=no ac_cv_file__dev_ptc=no ac_cv_file__dev_ptmx=yes make all --jobs 4 ./python -m test --ignorefile=Tools/jit/ignore-tests-emulated-linux.txt --multiprocess 0 --timeout 4500 --verbose2 --verbose3 + + jit-with-disabled-gil: + name: Free-Threaded (Debug) + needs: interpreter + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: '3.11' + - name: Build with JIT enabled and GIL disabled + run: | + sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" ./llvm.sh 18 + export PATH="$(llvm-config-18 --bindir):$PATH" + ./configure --enable-experimental-jit --with-pydebug --disable-gil + make all --jobs 4 + - name: Run tests + run: | + ./python -m test --multiprocess 0 --timeout 4500 --verbose2 --verbose3 diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml index 35996f237814ba..1b2d998182e0f7 100644 --- a/.github/workflows/mypy.yml +++ b/.github/workflows/mypy.yml @@ -34,6 +34,7 @@ concurrency: jobs: mypy: strategy: + fail-fast: false matrix: target: [ "Lib/_pyrepl", diff --git a/.github/workflows/reusable-docs.yml b/.github/workflows/reusable-docs.yml index 9e26d7847d2bd3..859f78d043ba92 100644 --- a/.github/workflows/reusable-docs.yml +++ b/.github/workflows/reusable-docs.yml @@ -62,7 +62,8 @@ jobs: python Doc/tools/check-warnings.py \ --annotate-diff '${{ env.branch_base }}' '${{ 
env.branch_pr }}' \ --fail-if-regression \ - --fail-if-improved + --fail-if-improved \ + --fail-if-new-news-nit # This build doesn't use problem matchers or check annotations build_doc_oldest_supported_sphinx: diff --git a/.github/workflows/reusable-macos.yml b/.github/workflows/reusable-macos.yml index d06a718d199c96..f825d1a7b3f69a 100644 --- a/.github/workflows/reusable-macos.yml +++ b/.github/workflows/reusable-macos.yml @@ -14,7 +14,7 @@ on: jobs: build_macos: - name: 'build and test' + name: build and test (${{ matrix.os }}) timeout-minutes: 60 env: HOMEBREW_NO_ANALYTICS: 1 @@ -27,6 +27,13 @@ jobs: fail-fast: false matrix: os: ${{fromJson(inputs.os-matrix)}} + is-fork: + - ${{ github.repository_owner != 'python' }} + exclude: + - os: "ghcr.io/cirruslabs/macos-runner:sonoma" + is-fork: true + - os: "macos-14" + is-fork: false runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/reusable-tsan.yml b/.github/workflows/reusable-tsan.yml index 48bd5b547e8cba..b6d5d8fa1c7157 100644 --- a/.github/workflows/reusable-tsan.yml +++ b/.github/workflows/reusable-tsan.yml @@ -11,6 +11,10 @@ on: description: 'A repo relative path to the suppressions file' required: true type: string + tsan_logs_artifact_name: + description: 'Name of the TSAN logs artifact. Must be unique for each job.' + required: true + type: string jobs: build_tsan_reusable: @@ -41,7 +45,7 @@ jobs: sudo sysctl -w vm.mmap_rnd_bits=28 - name: TSAN Option Setup run: | - echo "TSAN_OPTIONS=suppressions=${GITHUB_WORKSPACE}/${{ inputs.suppressions_path }}" >> $GITHUB_ENV + echo "TSAN_OPTIONS=log_path=${GITHUB_WORKSPACE}/tsan_log suppressions=${GITHUB_WORKSPACE}/${{ inputs.suppressions_path }} handle_segv=0" >> $GITHUB_ENV echo "CC=clang" >> $GITHUB_ENV echo "CXX=clang++" >> $GITHUB_ENV - name: Add ccache to PATH @@ -60,3 +64,13 @@ jobs: run: make pythoninfo - name: Tests run: ./python -m test --tsan -j4 + - name: Display TSAN logs + if: always() + run: find ${GITHUB_WORKSPACE} -name 'tsan_log.*' | xargs head -n 1000 + - name: Archive TSAN logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: ${{ inputs.tsan_logs_artifact_name }} + path: tsan_log.* + if-no-files-found: ignore diff --git a/Doc/Makefile b/Doc/Makefile index dd068c520ad60c..1cbfc722b010f5 100644 --- a/Doc/Makefile +++ b/Doc/Makefile @@ -32,6 +32,7 @@ help: @echo " clean to remove build files" @echo " venv to create a venv with necessary tools" @echo " html to make standalone HTML files" + @echo " gettext to generate POT files" @echo " htmlview to open the index page built by the html target in your browser" @echo " htmllive to rebuild and reload HTML files in your browser" @echo " htmlhelp to make HTML files and a HTML help project" @@ -140,14 +141,23 @@ pydoc-topics: build @echo "Building finished; now run this:" \ "cp build/pydoc-topics/topics.py ../Lib/pydoc_data/topics.py" +.PHONY: gettext +gettext: BUILDER = gettext +gettext: SPHINXOPTS += '-d build/doctrees-gettext' +gettext: build + .PHONY: htmlview htmlview: html $(PYTHON) -c "import os, webbrowser; webbrowser.open('file://' + os.path.realpath('build/html/index.html'))" +.PHONY: ensure-sphinx-autobuild +ensure-sphinx-autobuild: venv + $(VENVDIR)/bin/sphinx-autobuild --version > /dev/null || $(VENVDIR)/bin/python3 -m pip install sphinx-autobuild + .PHONY: htmllive htmllive: SPHINXBUILD = $(VENVDIR)/bin/sphinx-autobuild htmllive: SPHINXOPTS = --re-ignore="/venv/" --open-browser --delay 0 -htmllive: html +htmllive: ensure-sphinx-autobuild html .PHONY: clean clean: 
clean-venv diff --git a/Doc/c-api/buffer.rst b/Doc/c-api/buffer.rst index 1e1cabdf242bd1..9500fe465c7d94 100644 --- a/Doc/c-api/buffer.rst +++ b/Doc/c-api/buffer.rst @@ -147,9 +147,9 @@ a buffer, see :c:func:`PyObject_GetBuffer`. or a :c:macro:`PyBUF_WRITABLE` request, the consumer must disregard :c:member:`~Py_buffer.itemsize` and assume ``itemsize == 1``. - .. c:member:: const char *format + .. c:member:: char *format - A *NUL* terminated string in :mod:`struct` module style syntax describing + A *NULL* terminated string in :mod:`struct` module style syntax describing the contents of a single item. If this is ``NULL``, ``"B"`` (unsigned bytes) is assumed. diff --git a/Doc/c-api/dict.rst b/Doc/c-api/dict.rst index 03f3d28187bfe9..49a78583a6fe26 100644 --- a/Doc/c-api/dict.rst +++ b/Doc/c-api/dict.rst @@ -191,6 +191,7 @@ Dictionary Objects to both *default_value* and *\*result* (if it's not ``NULL``). These may refer to the same object: in that case you hold two separate references to it. + .. versionadded:: 3.13 diff --git a/Doc/c-api/frame.rst b/Doc/c-api/frame.rst index 82e0980ad753d0..638a740e0c24da 100644 --- a/Doc/c-api/frame.rst +++ b/Doc/c-api/frame.rst @@ -121,17 +121,18 @@ See also :ref:`Reflection `. .. c:function:: PyObject* PyFrame_GetLocals(PyFrameObject *frame) Get the *frame*'s :attr:`~frame.f_locals` attribute. - If the frame refers to a function or comprehension, this returns - a write-through proxy object that allows modifying the locals. - In all other cases (classes, modules) it returns the :class:`dict` - representing the frame locals directly. + If the frame refers to an :term:`optimized scope`, this returns a + write-through proxy object that allows modifying the locals. + In all other cases (classes, modules, :func:`exec`, :func:`eval`) it returns + the mapping representing the frame locals directly (as described for + :func:`locals`). Return a :term:`strong reference`. .. versionadded:: 3.11 .. versionchanged:: 3.13 - Return a proxy object for functions and comprehensions. + As part of :pep:`667`, return a proxy object for optimized scopes. .. c:function:: int PyFrame_GetLineNumber(PyFrameObject *frame) diff --git a/Doc/c-api/hash.rst b/Doc/c-api/hash.rst index ddf0b3e15dbdbe..7345a048a4128b 100644 --- a/Doc/c-api/hash.rst +++ b/Doc/c-api/hash.rst @@ -29,6 +29,12 @@ See also the :c:member:`PyTypeObject.tp_hash` member and :ref:`numeric-hash`. .. versionadded:: 3.13 +.. c:macro:: PyHASH_MULTIPLIER + + Prime multiplier used in string and various other hashes. + + .. versionadded:: 3.13 + .. c:macro:: PyHASH_INF The hash value returned for a positive infinity. diff --git a/Doc/c-api/module.rst b/Doc/c-api/module.rst index 6fe1ce9e994832..63e3bed6727987 100644 --- a/Doc/c-api/module.rst +++ b/Doc/c-api/module.rst @@ -427,14 +427,14 @@ The available slot types are: This slot is ignored by Python builds not configured with :option:`--disable-gil`. Otherwise, it determines whether or not importing this module will cause the GIL to be automatically enabled. See - :envvar:`PYTHON_GIL` and :option:`-X gil <-X>` for more detail. + :ref:`free-threaded-cpython` for more detail. Multiple ``Py_mod_gil`` slots may not be specified in one module definition. If ``Py_mod_gil`` is not specified, the import machinery defaults to ``Py_MOD_GIL_USED``. - .. versionadded: 3.13 + .. versionadded:: 3.13 See :PEP:`489` for more details on multi-phase initialization. 
diff --git a/Doc/c-api/monitoring.rst b/Doc/c-api/monitoring.rst index 763ec8ef761e4e..b34035b5548f02 100644 --- a/Doc/c-api/monitoring.rst +++ b/Doc/c-api/monitoring.rst @@ -2,7 +2,7 @@ .. _monitoring: -Monitorong C API +Monitoring C API ================ Added in version 3.13. @@ -121,10 +121,10 @@ See :mod:`sys.monitoring` for descriptions of the events. :c:func:`PyErr_GetRaisedException`). -.. c:function:: int PyMonitoring_FireStopIterationEvent(PyMonitoringState *state, PyObject *codelike, int32_t offset) +.. c:function:: int PyMonitoring_FireStopIterationEvent(PyMonitoringState *state, PyObject *codelike, int32_t offset, PyObject *value) - Fire a ``STOP_ITERATION`` event with the current exception (as returned by - :c:func:`PyErr_GetRaisedException`). + Fire a ``STOP_ITERATION`` event. If ``value`` is an instance of :exc:`StopIteration`, it is used. Otherwise, + a new :exc:`StopIteration` instance is created with ``value`` as its argument. Managing the Monitoring State diff --git a/Doc/c-api/object.rst b/Doc/c-api/object.rst index ba454db9117504..8eeac3fc8a1e58 100644 --- a/Doc/c-api/object.rst +++ b/Doc/c-api/object.rst @@ -65,7 +65,7 @@ Object Protocol Properly handle returning :c:data:`Py_NotImplemented` from within a C function (that is, create a new :term:`strong reference` - to NotImplemented and return it). + to :const:`NotImplemented` and return it). .. c:macro:: Py_PRINT_RAW diff --git a/Doc/c-api/reflection.rst b/Doc/c-api/reflection.rst index 4b1c4770848a30..038e6977104560 100644 --- a/Doc/c-api/reflection.rst +++ b/Doc/c-api/reflection.rst @@ -7,18 +7,48 @@ Reflection .. c:function:: PyObject* PyEval_GetBuiltins(void) + .. deprecated:: 3.13 + + Use :c:func:`PyEval_GetFrameBuiltins` instead. + Return a dictionary of the builtins in the current execution frame, or the interpreter of the thread state if no frame is currently executing. .. c:function:: PyObject* PyEval_GetLocals(void) - Return a dictionary of the local variables in the current execution frame, + .. deprecated:: 3.13 + + Use either :c:func:`PyEval_GetFrameLocals` to obtain the same behaviour as calling + :func:`locals` in Python code, or else call :c:func:`PyFrame_GetLocals` on the result + of :c:func:`PyEval_GetFrame` to access the :attr:`~frame.f_locals` attribute of the + currently executing frame. + + Return a mapping providing access to the local variables in the current execution frame, or ``NULL`` if no frame is currently executing. + Refer to :func:`locals` for details of the mapping returned at different scopes. + + As this function returns a :term:`borrowed reference`, the dictionary returned for + :term:`optimized scopes ` is cached on the frame object and will remain + alive as long as the frame object does. Unlike :c:func:`PyEval_GetFrameLocals` and + :func:`locals`, subsequent calls to this function in the same frame will update the + contents of the cached dictionary to reflect changes in the state of the local variables + rather than returning a new snapshot. + + .. versionchanged:: 3.13 + As part of :pep:`667`, :c:func:`PyFrame_GetLocals`, :func:`locals`, and + :attr:`FrameType.f_locals ` no longer make use of the shared cache + dictionary. Refer to the :ref:`What's New entry ` for + additional details. + .. c:function:: PyObject* PyEval_GetGlobals(void) + .. deprecated:: 3.13 + + Use :c:func:`PyEval_GetFrameGlobals` instead. + Return a dictionary of the global variables in the current execution frame, or ``NULL`` if no frame is currently executing. 
@@ -31,6 +61,36 @@ Reflection See also :c:func:`PyThreadState_GetFrame`. +.. c:function:: PyObject* PyEval_GetFrameBuiltins(void) + + Return a dictionary of the builtins in the current execution frame, + or the interpreter of the thread state if no frame is currently executing. + + .. versionadded:: 3.13 + + +.. c:function:: PyObject* PyEval_GetFrameLocals(void) + + Return a dictionary of the local variables in the current execution frame, + or ``NULL`` if no frame is currently executing. Equivalent to calling + :func:`locals` in Python code. + + To access :attr:`~frame.f_locals` on the current frame without making an independent + snapshot in :term:`optimized scopes `, call :c:func:`PyFrame_GetLocals` + on the result of :c:func:`PyEval_GetFrame`. + + .. versionadded:: 3.13 + + +.. c:function:: PyObject* PyEval_GetFrameGlobals(void) + + Return a dictionary of the global variables in the current execution frame, + or ``NULL`` if no frame is currently executing. Equivalent to calling + :func:`globals` in Python code. + + .. versionadded:: 3.13 + + .. c:function:: const char* PyEval_GetFuncName(PyObject *func) Return the name of *func* if it is a function, class or instance object, else the diff --git a/Doc/c-api/weakref.rst b/Doc/c-api/weakref.rst index 038f54a9751fd1..ae0699383900c4 100644 --- a/Doc/c-api/weakref.rst +++ b/Doc/c-api/weakref.rst @@ -35,7 +35,7 @@ as much as it can. callable object that receives notification when *ob* is garbage collected; it should accept a single parameter, which will be the weak reference object itself. *callback* may also be ``None`` or ``NULL``. If *ob* is not a - weakly referencable object, or if *callback* is not callable, ``None``, or + weakly referenceable object, or if *callback* is not callable, ``None``, or ``NULL``, this will return ``NULL`` and raise :exc:`TypeError`. @@ -47,7 +47,7 @@ as much as it can. be a callable object that receives notification when *ob* is garbage collected; it should accept a single parameter, which will be the weak reference object itself. *callback* may also be ``None`` or ``NULL``. If *ob* - is not a weakly referencable object, or if *callback* is not callable, + is not a weakly referenceable object, or if *callback* is not callable, ``None``, or ``NULL``, this will return ``NULL`` and raise :exc:`TypeError`. 
diff --git a/Doc/conf.py b/Doc/conf.py index 86371d17ae742a..47fb96fe1de482 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -374,6 +374,8 @@ # Split the index html_split_index = True +# Split pot files one per reST file +gettext_compact = False # Options for LaTeX output # ------------------------ @@ -435,6 +437,10 @@ epub_author = 'Python Documentation Authors' epub_publisher = 'Python Software Foundation' +# index pages are not valid xhtml +# https://github.com/sphinx-doc/sphinx/issues/12359 +epub_use_index = False + # Options for the coverage checker # -------------------------------- diff --git a/Doc/data/refcounts.dat b/Doc/data/refcounts.dat index 62a96146d605ff..a7d06e076a1b55 100644 --- a/Doc/data/refcounts.dat +++ b/Doc/data/refcounts.dat @@ -790,6 +790,12 @@ PyEval_GetGlobals:PyObject*::0: PyEval_GetFrame:PyObject*::0: +PyEval_GetFrameBuiltins:PyObject*::+1: + +PyEval_GetFrameLocals:PyObject*::+1: + +PyEval_GetFrameGlobals:PyObject*::+1: + PyEval_GetFuncDesc:const char*::: PyEval_GetFuncDesc:PyObject*:func:0: @@ -916,6 +922,32 @@ PyFloat_FromString:PyObject*:str:0: PyFloat_GetInfo:PyObject*::+1: PyFloat_GetInfo::void:: +PyFrame_GetBack:PyObject*::+1: +PyFrame_GetBack:PyFrameObject*:frame:0: + +PyFrame_GetBuiltins:PyObject*::+1: +PyFrame_GetBuiltins:PyFrameObject*:frame:0: + +PyFrame_GetCode:PyObject*::+1: +PyFrame_GetCode:PyFrameObject*:frame:0: + +PyFrame_GetGenerator:PyObject*::+1: +PyFrame_GetGenerator:PyFrameObject*:frame:0: + +PyFrame_GetGlobals:PyObject*::+1: +PyFrame_GetGlobals:PyFrameObject*:frame:0: + +PyFrame_GetLocals:PyObject*::+1: +PyFrame_GetLocals:PyFrameObject*:frame:0: + +PyFrame_GetVar:PyObject*::+1: +PyFrame_GetVar:PyFrameObject*:frame:0: +PyFrame_GetVar:PyObject*:name:0: + +PyFrame_GetVarString:PyObject*::+1: +PyFrame_GetVarString:PyFrameObject*:frame:0: +PyFrame_GetVarString:const char*:name:: + PyFrozenSet_Check:int::: PyFrozenSet_Check:PyObject*:p:0: diff --git a/Doc/extending/extending.rst b/Doc/extending/extending.rst index b70e1b1fe57e67..b0493bed75b151 100644 --- a/Doc/extending/extending.rst +++ b/Doc/extending/extending.rst @@ -868,7 +868,7 @@ It is important to call :c:func:`free` at the right time. If a block's address is forgotten but :c:func:`free` is not called for it, the memory it occupies cannot be reused until the program terminates. This is called a :dfn:`memory leak`. On the other hand, if a program calls :c:func:`free` for a block and then -continues to use the block, it creates a conflict with re-use of the block +continues to use the block, it creates a conflict with reuse of the block through another :c:func:`malloc` call. This is called :dfn:`using freed memory`. It has the same bad consequences as referencing uninitialized data --- core dumps, wrong results, mysterious crashes. diff --git a/Doc/extending/newtypes.rst b/Doc/extending/newtypes.rst index 473a418809cff1..fd05c82b41629a 100644 --- a/Doc/extending/newtypes.rst +++ b/Doc/extending/newtypes.rst @@ -545,7 +545,7 @@ performance-critical objects (such as numbers). .. seealso:: Documentation for the :mod:`weakref` module. -For an object to be weakly referencable, the extension type must set the +For an object to be weakly referenceable, the extension type must set the ``Py_TPFLAGS_MANAGED_WEAKREF`` bit of the :c:member:`~PyTypeObject.tp_flags` field. The legacy :c:member:`~PyTypeObject.tp_weaklistoffset` field should be left as zero. 
diff --git a/Doc/faq/general.rst b/Doc/faq/general.rst index ec7c2897594999..eb859c5d5992da 100644 --- a/Doc/faq/general.rst +++ b/Doc/faq/general.rst @@ -122,6 +122,8 @@ available. Consult `the Python Package Index `_ to find packages of interest to you. +.. _faq-version-numbering-scheme: + How does the Python version numbering scheme work? -------------------------------------------------- @@ -183,8 +185,6 @@ information on getting the source code and compiling it. How do I get documentation on Python? ------------------------------------- -.. XXX mention py3k - The standard documentation for the current stable version of Python is available at https://docs.python.org/3/. PDF, plain text, and downloadable HTML versions are also available at https://docs.python.org/3/download.html. diff --git a/Doc/glossary.rst b/Doc/glossary.rst index 2846f77feb112d..ae9949bc2867c4 100644 --- a/Doc/glossary.rst +++ b/Doc/glossary.rst @@ -425,11 +425,11 @@ Glossary An object that tries to find the :term:`loader` for a module that is being imported. - Since Python 3.3, there are two types of finder: :term:`meta path finders + There are two types of finder: :term:`meta path finders ` for use with :data:`sys.meta_path`, and :term:`path entry finders ` for use with :data:`sys.path_hooks`. - See :pep:`302`, :pep:`420` and :pep:`451` for much more detail. + See :ref:`importsystem` and :mod:`importlib` for much more detail. floor division Mathematical division that rounds down to nearest integer. The floor @@ -438,6 +438,12 @@ Glossary division. Note that ``(-11) // 4`` is ``-3`` because that is ``-2.75`` rounded *downward*. See :pep:`238`. + free threading + A threading model where multiple threads can run Python bytecode + simultaneously within the same interpreter. This is in contrast to + the :term:`global interpreter lock` which allows only one thread to + execute Python bytecode at a time. See :pep:`703`. + function A series of statements which returns some value to a caller. It can also be passed zero or more :term:`arguments ` which may be used in @@ -889,6 +895,15 @@ Glossary (methods). Also the ultimate base class of any :term:`new-style class`. + optimized scope + A scope where target local variable names are reliably known to the + compiler when the code is compiled, allowing optimization of read and + write access to these names. The local namespaces for functions, + generators, coroutines, comprehensions, and generator expressions are + optimized in this fashion. Note: most interpreter optimizations are + applied to all scopes, only those relying on a known set of local + and nonlocal variable names are restricted to optimized scopes. + package A Python :term:`module` which can contain submodules or recursively, subpackages. Technically, a package is a Python module with a diff --git a/Doc/howto/mro.rst b/Doc/howto/mro.rst index a44ef6848af4f3..f44b4f98e570bd 100644 --- a/Doc/howto/mro.rst +++ b/Doc/howto/mro.rst @@ -426,7 +426,7 @@ In this case the MRO is GFEF and the local precedence ordering is preserved. As a general rule, hierarchies such as the previous one should be -avoided, since it is unclear if F should override E or viceversa. +avoided, since it is unclear if F should override E or vice-versa. Python 2.3 solves the ambiguity by raising an exception in the creation of class G, effectively stopping the programmer from generating ambiguous hierarchies. 
The reason for that is that the C3 algorithm diff --git a/Doc/howto/pyporting.rst b/Doc/howto/pyporting.rst index d560364107bd12..9f73c811cfcbc0 100644 --- a/Doc/howto/pyporting.rst +++ b/Doc/howto/pyporting.rst @@ -18,9 +18,9 @@ please see :ref:`cporting-howto`. The archived python-porting_ mailing list may contain some useful guidance. -Since Python 3.13 the original porting guide was discontinued. +Since Python 3.11 the original porting guide was discontinued. You can find the old guide in the -`archive `_. +`archive `_. Third-party guides diff --git a/Doc/library/__future__.rst b/Doc/library/__future__.rst index 762f8b4695b3dd..1ebff4409b1e95 100644 --- a/Doc/library/__future__.rst +++ b/Doc/library/__future__.rst @@ -1,5 +1,5 @@ -:mod:`__future__` --- Future statement definitions -================================================== +:mod:`!__future__` --- Future statement definitions +=================================================== .. module:: __future__ :synopsis: Future statement definitions diff --git a/Doc/library/__main__.rst b/Doc/library/__main__.rst index c999253f781b10..6232e173d9537d 100644 --- a/Doc/library/__main__.rst +++ b/Doc/library/__main__.rst @@ -1,5 +1,5 @@ -:mod:`__main__` --- Top-level code environment -============================================== +:mod:`!__main__` --- Top-level code environment +=============================================== .. module:: __main__ :synopsis: The environment where top-level code is run. Covers command-line diff --git a/Doc/library/_thread.rst b/Doc/library/_thread.rst index 297f50a46e0692..81f0cac947f602 100644 --- a/Doc/library/_thread.rst +++ b/Doc/library/_thread.rst @@ -1,5 +1,5 @@ -:mod:`_thread` --- Low-level threading API -========================================== +:mod:`!_thread` --- Low-level threading API +=========================================== .. module:: _thread :synopsis: Low-level threading API. @@ -169,14 +169,14 @@ Lock objects have the following methods: time can acquire a lock --- that's their reason for existence). If the *blocking* argument is present, the action depends on its - value: if it is False, the lock is only acquired if it can be acquired - immediately without waiting, while if it is True, the lock is acquired + value: if it is false, the lock is only acquired if it can be acquired + immediately without waiting, while if it is true, the lock is acquired unconditionally as above. If the floating-point *timeout* argument is present and positive, it specifies the maximum wait time in seconds before returning. A negative *timeout* argument specifies an unbounded wait. You cannot specify - a *timeout* if *blocking* is False. + a *timeout* if *blocking* is false. The return value is ``True`` if the lock is acquired successfully, ``False`` if not. diff --git a/Doc/library/abc.rst b/Doc/library/abc.rst index 10e2cba50e49b0..168ef3ec00d81b 100644 --- a/Doc/library/abc.rst +++ b/Doc/library/abc.rst @@ -1,5 +1,5 @@ -:mod:`abc` --- Abstract Base Classes -==================================== +:mod:`!abc` --- Abstract Base Classes +===================================== .. module:: abc :synopsis: Abstract base classes according to :pep:`3119`. diff --git a/Doc/library/allos.rst b/Doc/library/allos.rst index f7105d8af8e28b..0223c1054ea5d8 100644 --- a/Doc/library/allos.rst +++ b/Doc/library/allos.rst @@ -16,7 +16,6 @@ but they are available on most other systems as well. 
Here's an overview: io.rst time.rst argparse.rst - getopt.rst logging.rst logging.config.rst logging.handlers.rst diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst index eaddd44e2defd7..0367c83d9369d3 100644 --- a/Doc/library/argparse.rst +++ b/Doc/library/argparse.rst @@ -1,5 +1,5 @@ -:mod:`argparse` --- Parser for command-line options, arguments and sub-commands -=============================================================================== +:mod:`!argparse` --- Parser for command-line options, arguments and sub-commands +================================================================================ .. module:: argparse :synopsis: Command-line option and argument parsing library. diff --git a/Doc/library/array.rst b/Doc/library/array.rst index cdf21db8779fe8..d34a1888342e27 100644 --- a/Doc/library/array.rst +++ b/Doc/library/array.rst @@ -1,5 +1,5 @@ -:mod:`array` --- Efficient arrays of numeric values -=================================================== +:mod:`!array` --- Efficient arrays of numeric values +==================================================== .. module:: array :synopsis: Space efficient arrays of uniformly typed numeric values. diff --git a/Doc/library/ast.rst b/Doc/library/ast.rst index 02dc7c86082502..d4ccf282a5d00a 100644 --- a/Doc/library/ast.rst +++ b/Doc/library/ast.rst @@ -1,5 +1,5 @@ -:mod:`ast` --- Abstract Syntax Trees -==================================== +:mod:`!ast` --- Abstract Syntax Trees +===================================== .. module:: ast :synopsis: Abstract Syntax Tree classes and manipulation. @@ -120,7 +120,8 @@ Node classes If a field that is optional in the grammar is omitted from the constructor, it defaults to ``None``. If a list field is omitted, it defaults to the empty - list. If any other field is omitted, a :exc:`DeprecationWarning` is raised + list. If a field of type :class:`!ast.expr_context` is omitted, it defaults to + :class:`Load() `. If any other field is omitted, a :exc:`DeprecationWarning` is raised and the AST node will not have this field. In Python 3.15, this condition will raise an error. @@ -596,8 +597,7 @@ Expressions * ``keywords`` holds a list of :class:`.keyword` objects representing arguments passed by keyword. - When creating a ``Call`` node, ``args`` and ``keywords`` are required, but - they can be empty lists. + The ``args`` and ``keywords`` arguments are optional and default to empty lists. .. doctest:: diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index d6ed817b13676f..374e789e91e790 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -796,7 +796,7 @@ Creating network servers :class:`str`, :class:`bytes`, and :class:`~pathlib.Path` paths are supported. - If *cleanup_socket* is True then the Unix socket will automatically + If *cleanup_socket* is true then the Unix socket will automatically be removed from the filesystem when the server is closed, unless the socket has been replaced after the server has been created. diff --git a/Doc/library/asyncio-task.rst b/Doc/library/asyncio-task.rst index 188649a2968df8..c5deac7e2748ae 100644 --- a/Doc/library/asyncio-task.rst +++ b/Doc/library/asyncio-task.rst @@ -538,7 +538,7 @@ Running Tasks Concurrently # [2, 6, 24] .. note:: - If *return_exceptions* is False, cancelling gather() after it + If *return_exceptions* is false, cancelling gather() after it has been marked done won't cancel any submitted awaitables. 
For instance, gather can be marked done after propagating an exception to the caller, therefore, calling ``gather.cancel()`` diff --git a/Doc/library/asyncio.rst b/Doc/library/asyncio.rst index 5f33c6813e74c0..184f981c1021aa 100644 --- a/Doc/library/asyncio.rst +++ b/Doc/library/asyncio.rst @@ -1,5 +1,5 @@ -:mod:`asyncio` --- Asynchronous I/O -=================================== +:mod:`!asyncio` --- Asynchronous I/O +==================================== .. module:: asyncio :synopsis: Asynchronous I/O. diff --git a/Doc/library/atexit.rst b/Doc/library/atexit.rst index 43a8bd2d7cd133..02d2f0807df8f6 100644 --- a/Doc/library/atexit.rst +++ b/Doc/library/atexit.rst @@ -1,5 +1,5 @@ -:mod:`atexit` --- Exit handlers -=============================== +:mod:`!atexit` --- Exit handlers +================================ .. module:: atexit :synopsis: Register and execute cleanup functions. diff --git a/Doc/library/base64.rst b/Doc/library/base64.rst index e596893358f3fb..834ab2536e6c14 100644 --- a/Doc/library/base64.rst +++ b/Doc/library/base64.rst @@ -1,5 +1,5 @@ -:mod:`base64` --- Base16, Base32, Base64, Base85 Data Encodings -=============================================================== +:mod:`!base64` --- Base16, Base32, Base64, Base85 Data Encodings +================================================================ .. module:: base64 :synopsis: RFC 4648: Base16, Base32, Base64 Data Encodings; @@ -193,7 +193,7 @@ The modern interface provides: *wrapcol* controls whether the output should have newline (``b'\n'``) characters added to it. If this is non-zero, each output line will be - at most this many characters long. + at most this many characters long, excluding the trailing newline. *pad* controls whether the input is padded to a multiple of 4 before encoding. Note that the ``btoa`` implementation always pads. diff --git a/Doc/library/bdb.rst b/Doc/library/bdb.rst index 504fbd0464515b..85df7914a9a014 100644 --- a/Doc/library/bdb.rst +++ b/Doc/library/bdb.rst @@ -1,5 +1,5 @@ -:mod:`bdb` --- Debugger framework -================================= +:mod:`!bdb` --- Debugger framework +================================== .. module:: bdb :synopsis: Debugger framework. @@ -86,7 +86,7 @@ The :mod:`bdb` module also defines two classes: .. attribute:: temporary - True if a :class:`Breakpoint` at (file, line) is temporary. + ``True`` if a :class:`Breakpoint` at (file, line) is temporary. .. attribute:: cond @@ -99,7 +99,7 @@ The :mod:`bdb` module also defines two classes: .. attribute:: enabled - True if :class:`Breakpoint` is enabled. + ``True`` if :class:`Breakpoint` is enabled. .. attribute:: bpbynumber @@ -215,22 +215,22 @@ The :mod:`bdb` module also defines two classes: .. method:: is_skipped_line(module_name) - Return True if *module_name* matches any skip pattern. + Return ``True`` if *module_name* matches any skip pattern. .. method:: stop_here(frame) - Return True if *frame* is below the starting frame in the stack. + Return ``True`` if *frame* is below the starting frame in the stack. .. method:: break_here(frame) - Return True if there is an effective breakpoint for this line. + Return ``True`` if there is an effective breakpoint for this line. Check whether a line or function breakpoint exists and is in effect. Delete temporary breakpoints based on information from :func:`effective`. .. method:: break_anywhere(frame) - Return True if any breakpoint exists for *frame*'s filename. + Return ``True`` if any breakpoint exists for *frame*'s filename. 
Derived classes should override these methods to gain control over debugger operation. @@ -348,7 +348,7 @@ The :mod:`bdb` module also defines two classes: .. method:: get_break(filename, lineno) - Return True if there is a breakpoint for *lineno* in *filename*. + Return ``True`` if there is a breakpoint for *lineno* in *filename*. .. method:: get_breaks(filename, lineno) @@ -412,7 +412,7 @@ Finally, the module defines the following functions: .. function:: checkfuncname(b, frame) - Return True if we should break here, depending on the way the + Return ``True`` if we should break here, depending on the way the :class:`Breakpoint` *b* was set. If it was set via line number, it checks if @@ -431,14 +431,14 @@ Finally, the module defines the following functions: :attr:`bplist ` for the (:attr:`file `, :attr:`line `) (which must exist) that is :attr:`enabled `, for - which :func:`checkfuncname` is True, and that has neither a False + which :func:`checkfuncname` is true, and that has neither a false :attr:`condition ` nor positive :attr:`ignore ` count. The *flag*, meaning that a - temporary breakpoint should be deleted, is False only when the + temporary breakpoint should be deleted, is ``False`` only when the :attr:`cond ` cannot be evaluated (in which case, :attr:`ignore ` count is ignored). - If no such entry exists, then (None, None) is returned. + If no such entry exists, then ``(None, None)`` is returned. .. function:: set_trace() diff --git a/Doc/library/binascii.rst b/Doc/library/binascii.rst index 39fabb59bb1984..1bab785684bbab 100644 --- a/Doc/library/binascii.rst +++ b/Doc/library/binascii.rst @@ -1,5 +1,5 @@ -:mod:`binascii` --- Convert between binary and ASCII -==================================================== +:mod:`!binascii` --- Convert between binary and ASCII +===================================================== .. module:: binascii :synopsis: Tools for converting between binary and various ASCII-encoded binary diff --git a/Doc/library/bisect.rst b/Doc/library/bisect.rst index 31c79b91061591..78da563397b625 100644 --- a/Doc/library/bisect.rst +++ b/Doc/library/bisect.rst @@ -1,5 +1,5 @@ -:mod:`bisect` --- Array bisection algorithm -=========================================== +:mod:`!bisect` --- Array bisection algorithm +============================================ .. module:: bisect :synopsis: Array bisection algorithms for binary searching. diff --git a/Doc/library/builtins.rst b/Doc/library/builtins.rst index 7e4f8fe0531567..644344e7fef29a 100644 --- a/Doc/library/builtins.rst +++ b/Doc/library/builtins.rst @@ -1,5 +1,5 @@ -:mod:`builtins` --- Built-in objects -==================================== +:mod:`!builtins` --- Built-in objects +===================================== .. module:: builtins :synopsis: The module that provides the built-in namespace. diff --git a/Doc/library/bz2.rst b/Doc/library/bz2.rst index eaf0a096455fad..ebe2e43febaefa 100644 --- a/Doc/library/bz2.rst +++ b/Doc/library/bz2.rst @@ -1,5 +1,5 @@ -:mod:`bz2` --- Support for :program:`bzip2` compression -======================================================= +:mod:`!bz2` --- Support for :program:`bzip2` compression +======================================================== .. module:: bz2 :synopsis: Interfaces for bzip2 compression and decompression. 
diff --git a/Doc/library/calendar.rst b/Doc/library/calendar.rst index e699a7284ac802..d5876054da3eee 100644 --- a/Doc/library/calendar.rst +++ b/Doc/library/calendar.rst @@ -1,5 +1,5 @@ -:mod:`calendar` --- General calendar-related functions -====================================================== +:mod:`!calendar` --- General calendar-related functions +======================================================= .. module:: calendar :synopsis: Functions for working with calendars, including some emulation diff --git a/Doc/library/cmath.rst b/Doc/library/cmath.rst index fdac51d9603ceb..381a8332f4b187 100644 --- a/Doc/library/cmath.rst +++ b/Doc/library/cmath.rst @@ -1,5 +1,5 @@ -:mod:`cmath` --- Mathematical functions for complex numbers -=========================================================== +:mod:`!cmath` --- Mathematical functions for complex numbers +============================================================ .. module:: cmath :synopsis: Mathematical functions for complex numbers. @@ -43,10 +43,7 @@ Conversions to and from polar coordinates A Python complex number ``z`` is stored internally using *rectangular* or *Cartesian* coordinates. It is completely determined by its *real -part* ``z.real`` and its *imaginary part* ``z.imag``. In other -words:: - - z == z.real + z.imag*1j +part* ``z.real`` and its *imaginary part* ``z.imag``. *Polar coordinates* give an alternative way to represent a complex number. In polar coordinates, a complex number *z* is defined by the @@ -90,7 +87,7 @@ rectangular coordinates to polar coordinates and back. .. function:: rect(r, phi) Return the complex number *x* with polar coordinates *r* and *phi*. - Equivalent to ``r * (math.cos(phi) + math.sin(phi)*1j)``. + Equivalent to ``complex(r * math.cos(phi), r * math.sin(phi))``. Power and logarithmic functions diff --git a/Doc/library/cmd.rst b/Doc/library/cmd.rst index 39ef4b481478d1..66544f82f6ff3f 100644 --- a/Doc/library/cmd.rst +++ b/Doc/library/cmd.rst @@ -1,5 +1,5 @@ -:mod:`cmd` --- Support for line-oriented command interpreters -============================================================= +:mod:`!cmd` --- Support for line-oriented command interpreters +============================================================== .. module:: cmd :synopsis: Build line-oriented command interpreters. diff --git a/Doc/library/code.rst b/Doc/library/code.rst index 8cb604cf48ff0b..8f7692df9fb22d 100644 --- a/Doc/library/code.rst +++ b/Doc/library/code.rst @@ -1,5 +1,5 @@ -:mod:`code` --- Interpreter base classes -======================================== +:mod:`!code` --- Interpreter base classes +========================================= .. module:: code :synopsis: Facilities to implement read-eval-print loops. @@ -18,16 +18,16 @@ build applications which provide an interactive interpreter prompt. This class deals with parsing and interpreter state (the user's namespace); it does not deal with input buffering or prompting or input file naming (the filename is always passed in explicitly). The optional *locals* argument - specifies the dictionary in which code will be executed; it defaults to a newly - created dictionary with key ``'__name__'`` set to ``'__console__'`` and key - ``'__doc__'`` set to ``None``. + specifies a mapping to use as the namespace in which code will be executed; + it defaults to a newly created dictionary with key ``'__name__'`` set to + ``'__console__'`` and key ``'__doc__'`` set to ``None``. .. 
class:: InteractiveConsole(locals=None, filename="", local_exit=False) Closely emulate the behavior of the interactive Python interpreter. This class builds on :class:`InteractiveInterpreter` and adds prompting using the familiar - ``sys.ps1`` and ``sys.ps2``, and input buffering. If *local_exit* is True, + ``sys.ps1`` and ``sys.ps2``, and input buffering. If *local_exit* is true, ``exit()`` and ``quit()`` in the console will not raise :exc:`SystemExit`, but instead return to the calling code. diff --git a/Doc/library/codecs.rst b/Doc/library/codecs.rst index 010ae25557a9c9..2cfd8a1eaee806 100644 --- a/Doc/library/codecs.rst +++ b/Doc/library/codecs.rst @@ -1,5 +1,5 @@ -:mod:`codecs` --- Codec registry and base classes -================================================= +:mod:`!codecs` --- Codec registry and base classes +================================================== .. module:: codecs :synopsis: Encode and decode data and streams. diff --git a/Doc/library/codeop.rst b/Doc/library/codeop.rst index 55606e1c5f09ac..16f674adb4b22b 100644 --- a/Doc/library/codeop.rst +++ b/Doc/library/codeop.rst @@ -1,5 +1,5 @@ -:mod:`codeop` --- Compile Python code -===================================== +:mod:`!codeop` --- Compile Python code +====================================== .. module:: codeop :synopsis: Compile (possibly incomplete) Python code. diff --git a/Doc/library/collections.abc.rst b/Doc/library/collections.abc.rst index 7bcaba60c6ddbd..ea27436f67f0bf 100644 --- a/Doc/library/collections.abc.rst +++ b/Doc/library/collections.abc.rst @@ -1,5 +1,5 @@ -:mod:`collections.abc` --- Abstract Base Classes for Containers -=============================================================== +:mod:`!collections.abc` --- Abstract Base Classes for Containers +================================================================ .. module:: collections.abc :synopsis: Abstract base classes for containers diff --git a/Doc/library/collections.rst b/Doc/library/collections.rst index 19b89ad79c7414..2a269712f1814d 100644 --- a/Doc/library/collections.rst +++ b/Doc/library/collections.rst @@ -1,5 +1,5 @@ -:mod:`collections` --- Container datatypes -========================================== +:mod:`!collections` --- Container datatypes +=========================================== .. module:: collections :synopsis: Container datatypes diff --git a/Doc/library/colorsys.rst b/Doc/library/colorsys.rst index b672a05b39145d..125d62b174088a 100644 --- a/Doc/library/colorsys.rst +++ b/Doc/library/colorsys.rst @@ -1,5 +1,5 @@ -:mod:`colorsys` --- Conversions between color systems -===================================================== +:mod:`!colorsys` --- Conversions between color systems +====================================================== .. module:: colorsys :synopsis: Conversion functions between RGB and other color systems. diff --git a/Doc/library/compileall.rst b/Doc/library/compileall.rst index df1eefab839cc1..d9c0cb67a92aa7 100644 --- a/Doc/library/compileall.rst +++ b/Doc/library/compileall.rst @@ -1,5 +1,5 @@ -:mod:`compileall` --- Byte-compile Python libraries -=================================================== +:mod:`!compileall` --- Byte-compile Python libraries +==================================================== .. module:: compileall :synopsis: Tools for byte-compiling all Python source files in a directory tree. @@ -226,7 +226,7 @@ Public functions The *invalidation_mode* parameter was added. .. versionchanged:: 3.7.2 - The *invalidation_mode* parameter's default value is updated to None. 
+ The *invalidation_mode* parameter's default value is updated to ``None``. .. versionchanged:: 3.8 Setting *workers* to 0 now chooses the optimal number of cores. @@ -289,7 +289,7 @@ Public functions The *invalidation_mode* parameter was added. .. versionchanged:: 3.7.2 - The *invalidation_mode* parameter's default value is updated to None. + The *invalidation_mode* parameter's default value is updated to ``None``. .. versionchanged:: 3.9 Added *stripdir*, *prependdir*, *limit_sl_dest* and *hardlink_dupes* arguments. @@ -318,7 +318,7 @@ Public functions The *invalidation_mode* parameter was added. .. versionchanged:: 3.7.2 - The *invalidation_mode* parameter's default value is updated to None. + The *invalidation_mode* parameter's default value is updated to ``None``. To force a recompile of all the :file:`.py` files in the :file:`Lib/` subdirectory and all its subdirectories:: diff --git a/Doc/library/concurrent.futures.rst b/Doc/library/concurrent.futures.rst index d3c7a40aa9d390..e3b24451188cc4 100644 --- a/Doc/library/concurrent.futures.rst +++ b/Doc/library/concurrent.futures.rst @@ -1,5 +1,5 @@ -:mod:`concurrent.futures` --- Launching parallel tasks -====================================================== +:mod:`!concurrent.futures` --- Launching parallel tasks +======================================================= .. module:: concurrent.futures :synopsis: Execute computations concurrently using threads or processes. diff --git a/Doc/library/configparser.rst b/Doc/library/configparser.rst index 9e7638d087a7ce..e84fb513e45267 100644 --- a/Doc/library/configparser.rst +++ b/Doc/library/configparser.rst @@ -1,5 +1,5 @@ -:mod:`configparser` --- Configuration file parser -================================================= +:mod:`!configparser` --- Configuration file parser +================================================== .. module:: configparser :synopsis: Configuration file parser. diff --git a/Doc/library/contextlib.rst b/Doc/library/contextlib.rst index 73e53aec9cbf1c..27cf99446e5980 100644 --- a/Doc/library/contextlib.rst +++ b/Doc/library/contextlib.rst @@ -314,7 +314,9 @@ Functions and classes provided: If the code within the :keyword:`!with` block raises a :exc:`BaseExceptionGroup`, suppressed exceptions are removed from the - group. If any exceptions in the group are not suppressed, a group containing them is re-raised. + group. Any exceptions of the group which are not suppressed are re-raised in + a new group which is created using the original group's :meth:`~BaseExceptionGroup.derive` + method. .. versionadded:: 3.4 @@ -796,7 +798,7 @@ executing that callback:: if result: stack.pop_all() -This allows the intended cleanup up behaviour to be made explicit up front, +This allows the intended cleanup behaviour to be made explicit up front, rather than requiring a separate flag variable. If a particular application uses this pattern a lot, it can be simplified diff --git a/Doc/library/contextvars.rst b/Doc/library/contextvars.rst index 647832447de946..8ae386b489fb4e 100644 --- a/Doc/library/contextvars.rst +++ b/Doc/library/contextvars.rst @@ -1,5 +1,5 @@ -:mod:`contextvars` --- Context Variables -======================================== +:mod:`!contextvars` --- Context Variables +========================================= .. 
module:: contextvars :synopsis: Context Variables diff --git a/Doc/library/copy.rst b/Doc/library/copy.rst index 74333b2e934814..95b41f988a035b 100644 --- a/Doc/library/copy.rst +++ b/Doc/library/copy.rst @@ -1,5 +1,5 @@ -:mod:`copy` --- Shallow and deep copy operations -================================================ +:mod:`!copy` --- Shallow and deep copy operations +================================================= .. module:: copy :synopsis: Shallow and deep copy operations. diff --git a/Doc/library/copyreg.rst b/Doc/library/copyreg.rst index 2a28c043f80723..6e3144824ebe91 100644 --- a/Doc/library/copyreg.rst +++ b/Doc/library/copyreg.rst @@ -1,5 +1,5 @@ -:mod:`copyreg` --- Register :mod:`pickle` support functions -=========================================================== +:mod:`!copyreg` --- Register :mod:`!pickle` support functions +============================================================= .. module:: copyreg :synopsis: Register pickle support functions. diff --git a/Doc/library/csv.rst b/Doc/library/csv.rst index d17468023c6de1..533cdf13974be6 100644 --- a/Doc/library/csv.rst +++ b/Doc/library/csv.rst @@ -1,5 +1,5 @@ -:mod:`csv` --- CSV File Reading and Writing -=========================================== +:mod:`!csv` --- CSV File Reading and Writing +============================================ .. module:: csv :synopsis: Write and read tabular data to and from delimited files. @@ -349,8 +349,8 @@ The :mod:`csv` module defines the following constants: ``None``. This is similar to :data:`QUOTE_ALL`, except that if a field value is ``None`` an empty (unquoted) string is written. - Instructs :class:`reader` objects to interpret an empty (unquoted) field as None and - to otherwise behave as :data:`QUOTE_ALL`. + Instructs :class:`reader` objects to interpret an empty (unquoted) field + as ``None`` and to otherwise behave as :data:`QUOTE_ALL`. .. versionadded:: 3.12 diff --git a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst index 1deaa3a0f3899f..820535e3cba106 100644 --- a/Doc/library/ctypes.rst +++ b/Doc/library/ctypes.rst @@ -1,5 +1,5 @@ -:mod:`ctypes` --- A foreign function library for Python -======================================================= +:mod:`!ctypes` --- A foreign function library for Python +======================================================== .. module:: ctypes :synopsis: A foreign function library for Python. diff --git a/Doc/library/curses.ascii.rst b/Doc/library/curses.ascii.rst index 410b76e77c025b..cb895664ff1b11 100644 --- a/Doc/library/curses.ascii.rst +++ b/Doc/library/curses.ascii.rst @@ -1,5 +1,5 @@ -:mod:`curses.ascii` --- Utilities for ASCII characters -====================================================== +:mod:`!curses.ascii` --- Utilities for ASCII characters +======================================================= .. module:: curses.ascii :synopsis: Constants and set-membership functions for ASCII characters. diff --git a/Doc/library/curses.panel.rst b/Doc/library/curses.panel.rst index d770c03c8375f4..11fd841d381f69 100644 --- a/Doc/library/curses.panel.rst +++ b/Doc/library/curses.panel.rst @@ -1,5 +1,5 @@ -:mod:`curses.panel` --- A panel stack extension for curses -========================================================== +:mod:`!curses.panel` --- A panel stack extension for curses +=========================================================== .. module:: curses.panel :synopsis: A panel stack extension that adds depth to curses windows. 
diff --git a/Doc/library/curses.rst b/Doc/library/curses.rst index 550872ce2ca59e..91ea6150fb15ba 100644 --- a/Doc/library/curses.rst +++ b/Doc/library/curses.rst @@ -1,5 +1,5 @@ -:mod:`curses` --- Terminal handling for character-cell displays -=============================================================== +:mod:`!curses` --- Terminal handling for character-cell displays +================================================================ .. module:: curses :synopsis: An interface to the curses library, providing portable @@ -924,7 +924,7 @@ the following methods and attributes: .. method:: window.getbegyx() - Return a tuple ``(y, x)`` of co-ordinates of upper-left corner. + Return a tuple ``(y, x)`` of coordinates of upper-left corner. .. method:: window.getbkgd() diff --git a/Doc/library/dataclasses.rst b/Doc/library/dataclasses.rst index 7a8b83e1384e5f..fcb5e8bad295a0 100644 --- a/Doc/library/dataclasses.rst +++ b/Doc/library/dataclasses.rst @@ -185,7 +185,10 @@ Module contents - *slots*: If true (the default is ``False``), :attr:`~object.__slots__` attribute will be generated and new class will be returned instead of the original one. If :attr:`!__slots__` is already defined in the class, then :exc:`TypeError` - is raised. + is raised. Calling no-arg :func:`super` in dataclasses using ``slots=True`` will result in + the following exception being raised: + ``TypeError: super(type, obj): obj must be an instance or subtype of type``. + The two-arg :func:`super` is a valid workaround. See :gh:`90562` for full details. .. versionadded:: 3.10 @@ -278,7 +281,7 @@ Module contents generated equality and comparison methods (:meth:`~object.__eq__`, :meth:`~object.__gt__`, et al.). - - *metadata*: This can be a mapping or None. None is treated as + - *metadata*: This can be a mapping or ``None``. ``None`` is treated as an empty dict. This value is wrapped in :func:`~types.MappingProxyType` to make it read-only, and exposed on the :class:`Field` object. It is not used at all by Data @@ -458,8 +461,8 @@ Module contents .. function:: is_dataclass(obj) - Return ``True`` if its parameter is a dataclass or an instance of one, - otherwise return ``False``. + Return ``True`` if its parameter is a dataclass (including subclasses of a + dataclass) or an instance of one, otherwise return ``False``. If you need to know if a class is an instance of a dataclass (and not a dataclass itself), then add a further check for ``not @@ -615,7 +618,9 @@ methods will raise a :exc:`FrozenInstanceError` when invoked. There is a tiny performance penalty when using ``frozen=True``: :meth:`~object.__init__` cannot use simple assignment to initialize fields, and -must use :meth:`!__setattr__`. +must use :meth:`!object.__setattr__`. + +.. Make sure to not remove "object" from "object.__setattr__" in the above markup! .. _dataclasses-inheritance: diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst index b20b2027cfcc93..0723d0fe2fceb0 100644 --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -1,5 +1,5 @@ -:mod:`datetime` --- Basic date and time types -============================================= +:mod:`!datetime` --- Basic date and time types +============================================== .. module:: datetime :synopsis: Basic date and time types. 
diff --git a/Doc/library/dbm.rst b/Doc/library/dbm.rst index 54627363ba76ae..77148a558d1909 100644 --- a/Doc/library/dbm.rst +++ b/Doc/library/dbm.rst @@ -1,5 +1,5 @@ -:mod:`dbm` --- Interfaces to Unix "databases" -============================================= +:mod:`!dbm` --- Interfaces to Unix "databases" +============================================== .. module:: dbm :synopsis: Interfaces to various Unix "database" formats. diff --git a/Doc/library/decimal.rst b/Doc/library/decimal.rst index 3c51dbc04dc92e..3e33581f96f16a 100644 --- a/Doc/library/decimal.rst +++ b/Doc/library/decimal.rst @@ -1,5 +1,5 @@ -:mod:`decimal` --- Decimal fixed point and floating point arithmetic -==================================================================== +:mod:`!decimal` --- Decimal fixed point and floating point arithmetic +===================================================================== .. module:: decimal :synopsis: Implementation of the General Decimal Arithmetic Specification. diff --git a/Doc/library/difflib.rst b/Doc/library/difflib.rst index 6a34f2e4d7f5e1..ce948a6860f02c 100644 --- a/Doc/library/difflib.rst +++ b/Doc/library/difflib.rst @@ -1,5 +1,5 @@ -:mod:`difflib` --- Helpers for computing deltas -=============================================== +:mod:`!difflib` --- Helpers for computing deltas +================================================ .. module:: difflib :synopsis: Helpers for computing differences between objects. diff --git a/Doc/library/dis.rst b/Doc/library/dis.rst index e255fad55e4a25..ca215888f09a74 100644 --- a/Doc/library/dis.rst +++ b/Doc/library/dis.rst @@ -1,5 +1,5 @@ -:mod:`dis` --- Disassembler for Python bytecode -=============================================== +:mod:`!dis` --- Disassembler for Python bytecode +================================================ .. module:: dis :synopsis: Disassembler for Python bytecode. @@ -336,9 +336,10 @@ operation is being performed, so the intermediate analysis object isn't useful: Added the *show_caches* and *adaptive* parameters. .. versionchanged:: 3.13 - The *show_caches* parameter is deprecated and has no effect. The *cache_info* - field of each instruction is populated regardless of its value. - + The *show_caches* parameter is deprecated and has no effect. The iterator + generates the :class:`Instruction` instances with the *cache_info* + field populated (regardless of the value of *show_caches*) and it no longer + generates separate items for the cache entries. .. function:: findlinestarts(code) diff --git a/Doc/library/doctest.rst b/Doc/library/doctest.rst index 5f7d10a6dce037..6b0282eed49566 100644 --- a/Doc/library/doctest.rst +++ b/Doc/library/doctest.rst @@ -1,5 +1,5 @@ -:mod:`doctest` --- Test interactive Python examples -=================================================== +:mod:`!doctest` --- Test interactive Python examples +==================================================== .. module:: doctest :synopsis: Test pieces of code within docstrings. diff --git a/Doc/library/email.charset.rst b/Doc/library/email.charset.rst index aa0134412f3a60..6875af2be49d7a 100644 --- a/Doc/library/email.charset.rst +++ b/Doc/library/email.charset.rst @@ -1,5 +1,5 @@ -:mod:`email.charset`: Representing character sets -------------------------------------------------- +:mod:`!email.charset`: Representing character sets +-------------------------------------------------- .. 
module:: email.charset :synopsis: Character Sets diff --git a/Doc/library/email.contentmanager.rst b/Doc/library/email.contentmanager.rst index 5b49339650f0e9..34121f8c0a7727 100644 --- a/Doc/library/email.contentmanager.rst +++ b/Doc/library/email.contentmanager.rst @@ -1,5 +1,5 @@ -:mod:`email.contentmanager`: Managing MIME Content --------------------------------------------------- +:mod:`!email.contentmanager`: Managing MIME Content +--------------------------------------------------- .. module:: email.contentmanager :synopsis: Storing and Retrieving Content from MIME Parts diff --git a/Doc/library/email.encoders.rst b/Doc/library/email.encoders.rst index 3bd377e33f6c15..9c8c8c9234ed7a 100644 --- a/Doc/library/email.encoders.rst +++ b/Doc/library/email.encoders.rst @@ -1,5 +1,5 @@ -:mod:`email.encoders`: Encoders -------------------------------- +:mod:`!email.encoders`: Encoders +-------------------------------- .. module:: email.encoders :synopsis: Encoders for email message payloads. diff --git a/Doc/library/email.errors.rst b/Doc/library/email.errors.rst index 56aea6598b8615..33ab4265116178 100644 --- a/Doc/library/email.errors.rst +++ b/Doc/library/email.errors.rst @@ -1,5 +1,5 @@ -:mod:`email.errors`: Exception and Defect classes -------------------------------------------------- +:mod:`!email.errors`: Exception and Defect classes +-------------------------------------------------- .. module:: email.errors :synopsis: The exception classes used by the email package. diff --git a/Doc/library/email.generator.rst b/Doc/library/email.generator.rst index afa0038ea2d6c4..a3132d02687bc9 100644 --- a/Doc/library/email.generator.rst +++ b/Doc/library/email.generator.rst @@ -1,5 +1,5 @@ -:mod:`email.generator`: Generating MIME documents -------------------------------------------------- +:mod:`!email.generator`: Generating MIME documents +-------------------------------------------------- .. module:: email.generator :synopsis: Generate flat text email messages from a message structure. diff --git a/Doc/library/email.header.rst b/Doc/library/email.header.rst index e093f138936b36..6e230d5faf1654 100644 --- a/Doc/library/email.header.rst +++ b/Doc/library/email.header.rst @@ -1,5 +1,5 @@ -:mod:`email.header`: Internationalized headers ----------------------------------------------- +:mod:`!email.header`: Internationalized headers +----------------------------------------------- .. module:: email.header :synopsis: Representing non-ASCII headers diff --git a/Doc/library/email.headerregistry.rst b/Doc/library/email.headerregistry.rst index 00a954e0307ea6..bcbd00c833e28e 100644 --- a/Doc/library/email.headerregistry.rst +++ b/Doc/library/email.headerregistry.rst @@ -1,5 +1,5 @@ -:mod:`email.headerregistry`: Custom Header Objects --------------------------------------------------- +:mod:`!email.headerregistry`: Custom Header Objects +--------------------------------------------------- .. module:: email.headerregistry :synopsis: Automatic Parsing of headers based on the field name diff --git a/Doc/library/email.iterators.rst b/Doc/library/email.iterators.rst index d53ab33b8904a7..090981d84b4de3 100644 --- a/Doc/library/email.iterators.rst +++ b/Doc/library/email.iterators.rst @@ -1,5 +1,5 @@ -:mod:`email.iterators`: Iterators ---------------------------------- +:mod:`!email.iterators`: Iterators +---------------------------------- .. module:: email.iterators :synopsis: Iterate over a message object tree. 
diff --git a/Doc/library/email.message.rst b/Doc/library/email.message.rst index adea067e082615..e9cce1af186526 100644 --- a/Doc/library/email.message.rst +++ b/Doc/library/email.message.rst @@ -1,5 +1,5 @@ -:mod:`email.message`: Representing an email message ---------------------------------------------------- +:mod:`!email.message`: Representing an email message +---------------------------------------------------- .. module:: email.message :synopsis: The base class representing email messages. @@ -41,7 +41,7 @@ The :class:`EmailMessage` dictionary-like interface is indexed by the header names, which must be ASCII values. The values of the dictionary are strings with some extra methods. Headers are stored and returned in case-preserving form, but field names are matched case-insensitively. The keys are ordered, -but unlike a real dict, there can be duplicates. Addtional methods are +but unlike a real dict, there can be duplicates. Additional methods are provided for working with headers that have duplicate keys. The *payload* is either a string or bytes object, in the case of simple message diff --git a/Doc/library/email.mime.rst b/Doc/library/email.mime.rst index dc0dd3b9eebde6..b85673a4acd0d0 100644 --- a/Doc/library/email.mime.rst +++ b/Doc/library/email.mime.rst @@ -1,5 +1,5 @@ -:mod:`email.mime`: Creating email and MIME objects from scratch ---------------------------------------------------------------- +:mod:`!email.mime`: Creating email and MIME objects from scratch +---------------------------------------------------------------- .. module:: email.mime :synopsis: Build MIME messages. diff --git a/Doc/library/email.parser.rst b/Doc/library/email.parser.rst index dda0466a6afa7d..439b5c8f34b65a 100644 --- a/Doc/library/email.parser.rst +++ b/Doc/library/email.parser.rst @@ -1,5 +1,5 @@ -:mod:`email.parser`: Parsing email messages -------------------------------------------- +:mod:`!email.parser`: Parsing email messages +-------------------------------------------- .. module:: email.parser :synopsis: Parse flat text email messages to produce a message object structure. diff --git a/Doc/library/email.policy.rst b/Doc/library/email.policy.rst index f4777bb2462138..83feedf728351e 100644 --- a/Doc/library/email.policy.rst +++ b/Doc/library/email.policy.rst @@ -1,5 +1,5 @@ -:mod:`email.policy`: Policy Objects ------------------------------------ +:mod:`!email.policy`: Policy Objects +------------------------------------ .. module:: email.policy :synopsis: Controlling the parsing and generating of messages diff --git a/Doc/library/email.rst b/Doc/library/email.rst index 3a6039004fcaae..66c42e4a5008ee 100644 --- a/Doc/library/email.rst +++ b/Doc/library/email.rst @@ -1,5 +1,5 @@ -:mod:`email` --- An email and MIME handling package -=================================================== +:mod:`!email` --- An email and MIME handling package +==================================================== .. module:: email :synopsis: Package supporting the parsing, manipulating, and generating diff --git a/Doc/library/email.utils.rst b/Doc/library/email.utils.rst index d693a9bc3933b5..6f0bed130bc64c 100644 --- a/Doc/library/email.utils.rst +++ b/Doc/library/email.utils.rst @@ -1,5 +1,5 @@ -:mod:`email.utils`: Miscellaneous utilities -------------------------------------------- +:mod:`!email.utils`: Miscellaneous utilities +-------------------------------------------- .. module:: email.utils :synopsis: Miscellaneous email package utilities. 
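A short illustration of the duplicate-header behaviour mentioned in the ``email.message`` hunk above, using the documented :class:`!EmailMessage` methods ``get_all`` and ``__delitem__`` (the header name is made up)::

    from email.message import EmailMessage

    msg = EmailMessage()
    msg["X-Trace"] = "first hop"
    msg["X-Trace"] = "second hop"    # adds a second header, does not replace

    print(msg.get_all("X-Trace"))    # ['first hop', 'second hop']
    del msg["X-Trace"]               # removes every X-Trace header

    # Under the default policy some standard headers (e.g. To, Subject) are
    # limited to one occurrence, so adding a duplicate of those raises ValueError.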
diff --git a/Doc/library/ensurepip.rst b/Doc/library/ensurepip.rst index 168e45cfd6fc90..518a2940edcf69 100644 --- a/Doc/library/ensurepip.rst +++ b/Doc/library/ensurepip.rst @@ -1,5 +1,5 @@ -:mod:`ensurepip` --- Bootstrapping the ``pip`` installer -======================================================== +:mod:`!ensurepip` --- Bootstrapping the ``pip`` installer +========================================================= .. module:: ensurepip :synopsis: Bootstrapping the "pip" installer into an existing Python diff --git a/Doc/library/enum.rst b/Doc/library/enum.rst index 21f41b73086c98..8c604c2347a547 100644 --- a/Doc/library/enum.rst +++ b/Doc/library/enum.rst @@ -1,5 +1,5 @@ -:mod:`enum` --- Support for enumerations -======================================== +:mod:`!enum` --- Support for enumerations +========================================= .. module:: enum :synopsis: Implementation of an enumeration class. diff --git a/Doc/library/errno.rst b/Doc/library/errno.rst index 283e8b013265d9..4983b8961b1c3f 100644 --- a/Doc/library/errno.rst +++ b/Doc/library/errno.rst @@ -1,5 +1,5 @@ -:mod:`errno` --- Standard errno system symbols -============================================== +:mod:`!errno` --- Standard errno system symbols +=============================================== .. module:: errno :synopsis: Standard errno system symbols. diff --git a/Doc/library/exceptions.rst b/Doc/library/exceptions.rst index 7879fb015bddfa..7910b306f143d7 100644 --- a/Doc/library/exceptions.rst +++ b/Doc/library/exceptions.rst @@ -989,7 +989,8 @@ their subgroups based on the types of the contained exceptions. Returns an exception group with the same :attr:`message`, but which wraps the exceptions in ``excs``. - This method is used by :meth:`subgroup` and :meth:`split`. A + This method is used by :meth:`subgroup` and :meth:`split`, which + are used in various contexts to break up an exception group. A subclass needs to override it in order to make :meth:`subgroup` and :meth:`split` return instances of the subclass rather than :exc:`ExceptionGroup`. diff --git a/Doc/library/faulthandler.rst b/Doc/library/faulthandler.rst index c40d5e9aacb83c..4067d7912b88b2 100644 --- a/Doc/library/faulthandler.rst +++ b/Doc/library/faulthandler.rst @@ -1,5 +1,5 @@ -:mod:`faulthandler` --- Dump the Python traceback -================================================= +:mod:`!faulthandler` --- Dump the Python traceback +================================================== .. module:: faulthandler :synopsis: Dump the Python traceback. diff --git a/Doc/library/fcntl.rst b/Doc/library/fcntl.rst index 59215f34e01cb7..7bd64e43dd5bfe 100644 --- a/Doc/library/fcntl.rst +++ b/Doc/library/fcntl.rst @@ -1,5 +1,5 @@ -:mod:`fcntl` --- The ``fcntl`` and ``ioctl`` system calls -========================================================= +:mod:`!fcntl` --- The ``fcntl`` and ``ioctl`` system calls +========================================================== .. module:: fcntl :platform: Unix diff --git a/Doc/library/filecmp.rst b/Doc/library/filecmp.rst index 42d20b9c201783..2a0670ffcc2cbc 100644 --- a/Doc/library/filecmp.rst +++ b/Doc/library/filecmp.rst @@ -1,5 +1,5 @@ -:mod:`filecmp` --- File and Directory Comparisons -================================================= +:mod:`!filecmp` --- File and Directory Comparisons +================================================== .. module:: filecmp :synopsis: Compare files efficiently. 
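A brief sketch of the ``derive`` override discussed in the exceptions hunk above; Python 3.11+ is assumed, and the subclass name and extra ``field`` attribute are purely illustrative::

    class ValidationErrors(ExceptionGroup):
        def __new__(cls, message, excs, field="unknown"):
            self = super().__new__(cls, message, excs)
            self.field = field
            return self

        def derive(self, excs):
            # subgroup() and split() call derive() with the matching exceptions;
            # returning the subclass here makes their results ValidationErrors
            # instances rather than plain ExceptionGroup.
            return ValidationErrors(self.message, excs, field=self.field)

    eg = ValidationErrors("bad input", [ValueError("x"), TypeError("y")], field="form")
    only_value_errors = eg.subgroup(ValueError)
    print(type(only_value_errors).__name__, only_value_errors.field)   # ValidationErrors form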
diff --git a/Doc/library/fileinput.rst b/Doc/library/fileinput.rst index f93e9a58791eeb..94a4139f64c2e4 100644 --- a/Doc/library/fileinput.rst +++ b/Doc/library/fileinput.rst @@ -1,5 +1,5 @@ -:mod:`fileinput` --- Iterate over lines from multiple input streams -=================================================================== +:mod:`!fileinput` --- Iterate over lines from multiple input streams +==================================================================== .. module:: fileinput :synopsis: Loop over standard input or a list of files. diff --git a/Doc/library/fnmatch.rst b/Doc/library/fnmatch.rst index 7cddecd5e80887..fda44923f204fc 100644 --- a/Doc/library/fnmatch.rst +++ b/Doc/library/fnmatch.rst @@ -1,5 +1,5 @@ -:mod:`fnmatch` --- Unix filename pattern matching -================================================= +:mod:`!fnmatch` --- Unix filename pattern matching +================================================== .. module:: fnmatch :synopsis: Unix shell style filename pattern matching. diff --git a/Doc/library/fractions.rst b/Doc/library/fractions.rst index 887c3844d20faa..552d6030b1ceda 100644 --- a/Doc/library/fractions.rst +++ b/Doc/library/fractions.rst @@ -1,5 +1,5 @@ -:mod:`fractions` --- Rational numbers -===================================== +:mod:`!fractions` --- Rational numbers +====================================== .. module:: fractions :synopsis: Rational numbers. diff --git a/Doc/library/ftplib.rst b/Doc/library/ftplib.rst index 8d1aae018ada12..8c39dc00f5db02 100644 --- a/Doc/library/ftplib.rst +++ b/Doc/library/ftplib.rst @@ -1,5 +1,5 @@ -:mod:`ftplib` --- FTP protocol client -===================================== +:mod:`!ftplib` --- FTP protocol client +====================================== .. module:: ftplib :synopsis: FTP protocol client (requires sockets). diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst index 0c7ef67774cd05..c07b1043afe627 100644 --- a/Doc/library/functions.rst +++ b/Doc/library/functions.rst @@ -141,10 +141,11 @@ are always available. They are listed here in alphabetical order. See also :func:`format` for more information. -.. class:: bool(x=False) +.. class:: bool(object=False, /) - Return a Boolean value, i.e. one of ``True`` or ``False``. *x* is converted - using the standard :ref:`truth testing procedure `. If *x* is false + Return a Boolean value, i.e. one of ``True`` or ``False``. The argument + is converted using the standard :ref:`truth testing procedure `. + If the argument is false or omitted, this returns ``False``; otherwise, it returns ``True``. The :class:`bool` class is a subclass of :class:`int` (see :ref:`typesnumeric`). It cannot be subclassed further. Its only instances are ``False`` and @@ -153,7 +154,7 @@ are always available. They are listed here in alphabetical order. .. index:: pair: Boolean; type .. versionchanged:: 3.7 - *x* is now a positional-only parameter. + The parameter is now positional-only. .. function:: breakpoint(*args, **kws) @@ -371,29 +372,73 @@ are always available. They are listed here in alphabetical order. support for top-level ``await``, ``async for``, and ``async with``. -.. class:: complex(real=0, imag=0) - complex(string) +.. class:: complex(number=0, /) + complex(string, /) + complex(real=0, imag=0) + + Convert a single string or number to a complex number, or create a + complex number from real and imaginary parts. + + Examples: + + .. 
doctest:: + + >>> complex('+1.23') + (1.23+0j) + >>> complex('-4.5j') + -4.5j + >>> complex('-1.23+4.5j') + (-1.23+4.5j) + >>> complex('\t( -1.23+4.5J )\n') + (-1.23+4.5j) + >>> complex('-Infinity+NaNj') + (-inf+nanj) + >>> complex(1.23) + (1.23+0j) + >>> complex(imag=-4.5) + -4.5j + >>> complex(-1.23, 4.5) + (-1.23+4.5j) + + If the argument is a string, it must contain either a real part (in the + same format as for :func:`float`) or an imaginary part (in the same + format but with a ``'j'`` or ``'J'`` suffix), or both real and imaginary + parts (the sign of the imaginary part is mandatory in this case). + The string can optionally be surrounded by whitespaces and the round + parentheses ``'('`` and ``')'``, which are ignored. + The string must not contain whitespace between ``'+'``, ``'-'``, the + ``'j'`` or ``'J'`` suffix, and the decimal number. + For example, ``complex('1+2j')`` is fine, but ``complex('1 + 2j')`` raises + :exc:`ValueError`. + More precisely, the input must conform to the :token:`~float:complexvalue` + production rule in the following grammar, after parentheses and leading and + trailing whitespace characters are removed: - Return a complex number with the value *real* + *imag*\*1j or convert a string - or number to a complex number. If the first parameter is a string, it will - be interpreted as a complex number and the function must be called without a - second parameter. The second parameter can never be a string. Each argument - may be any numeric type (including complex). If *imag* is omitted, it - defaults to zero and the constructor serves as a numeric conversion like - :class:`int` and :class:`float`. If both arguments are omitted, returns - ``0j``. + .. productionlist:: float + complexvalue: `floatvalue` | + : `floatvalue` ("j" | "J") | + : `floatvalue` `sign` `absfloatvalue` ("j" | "J") + If the argument is a number, the constructor serves as a numeric + conversion like :class:`int` and :class:`float`. For a general Python object ``x``, ``complex(x)`` delegates to - ``x.__complex__()``. If :meth:`~object.__complex__` is not defined then it falls back - to :meth:`~object.__float__`. If :meth:`!__float__` is not defined then it falls back + ``x.__complex__()``. + If :meth:`~object.__complex__` is not defined then it falls back + to :meth:`~object.__float__`. + If :meth:`!__float__` is not defined then it falls back to :meth:`~object.__index__`. - .. note:: + If two arguments are provided or keyword arguments are used, each argument + may be any numeric type (including complex). + If both arguments are real numbers, return a complex number with the real + component *real* and the imaginary component *imag*. + If both arguments are complex numbers, return a complex number with the real + component ``real.real-imag.imag`` and the imaginary component + ``real.imag+imag.real``. + If one of the arguments is a real number, only its real component is used in + the above expressions. - When converting from a string, the string must not contain whitespace - around the central ``+`` or ``-`` operator. For example, - ``complex('1+2j')`` is fine, but ``complex('1 + 2j')`` raises - :exc:`ValueError`. + If all arguments are omitted, returns ``0j``. The complex type is described in :ref:`typesnumeric`. @@ -543,18 +588,19 @@ are always available. They are listed here in alphabetical order. The *expression* argument is parsed and evaluated as a Python expression (technically speaking, a condition list) using the *globals* and *locals* - dictionaries as global and local namespace.
If the *globals* dictionary is + mappings as global and local namespace. If the *globals* dictionary is present and does not contain a value for the key ``__builtins__``, a reference to the dictionary of the built-in module :mod:`builtins` is inserted under that key before *expression* is parsed. That way you can control what builtins are available to the executed code by inserting your own ``__builtins__`` dictionary into *globals* before passing it to - :func:`eval`. If the *locals* dictionary is omitted it defaults to the - *globals* dictionary. If both dictionaries are omitted, the expression is + :func:`eval`. If the *locals* mapping is omitted it defaults to the + *globals* dictionary. If both mappings are omitted, the expression is executed with the *globals* and *locals* in the environment where - :func:`eval` is called. Note, *eval()* does not have access to the + :func:`eval` is called. Note, *eval()* will only have access to the :term:`nested scopes ` (non-locals) in the enclosing - environment. + environment if they are already referenced in the scope that is calling + :func:`eval` (e.g. via a :keyword:`nonlocal` statement). Example: @@ -587,6 +633,11 @@ are always available. They are listed here in alphabetical order. The *globals* and *locals* arguments can now be passed as keywords. + .. versionchanged:: 3.13 + + The semantics of the default *locals* namespace have been adjusted as + described for the :func:`locals` builtin. + .. index:: pair: built-in function; exec .. function:: exec(source, /, globals=None, locals=None, *, closure=None) @@ -608,9 +659,15 @@ are always available. They are listed here in alphabetical order. will be used for both the global and the local variables. If *globals* and *locals* are given, they are used for the global and local variables, respectively. If provided, *locals* can be any mapping object. Remember - that at the module level, globals and locals are the same dictionary. If exec - gets two separate objects as *globals* and *locals*, the code will be - executed as if it were embedded in a class definition. + that at the module level, globals and locals are the same dictionary. + + .. note:: + + When ``exec`` gets two separate objects as *globals* and *locals*, the + code will be executed as if it were embedded in a class definition. This + means functions and classes defined in the executed code will not be able + to access variables assigned at the top level (as the "top level" + variables are treated as class variables in a class definition). If the *globals* dictionary does not contain a value for the key ``__builtins__``, a reference to the dictionary of the built-in module @@ -631,7 +688,7 @@ are always available. They are listed here in alphabetical order. .. note:: The built-in functions :func:`globals` and :func:`locals` return the current - global and local dictionary, respectively, which may be useful to pass around + global and local namespace, respectively, which may be useful to pass around for use as the second and third argument to :func:`exec`. .. note:: @@ -647,6 +704,11 @@ are always available. They are listed here in alphabetical order. The *globals* and *locals* arguments can now be passed as keywords. + .. versionchanged:: 3.13 + + The semantics of the default *locals* namespace have been adjusted as + described for the :func:`locals` builtin. + .. function:: filter(function, iterable) @@ -665,21 +727,38 @@ are always available. They are listed here in alphabetical order. 
elements of *iterable* for which *function* is false. -.. class:: float(x=0.0) +.. class:: float(number=0.0, /) + float(string, /) .. index:: single: NaN single: Infinity - Return a floating point number constructed from a number or string *x*. + Return a floating point number constructed from a number or a string. + + Examples: + + .. doctest:: + + >>> float('+1.23') + 1.23 + >>> float(' -12345\n') + -12345.0 + >>> float('1e-003') + 0.001 + >>> float('+1E6') + 1000000.0 + >>> float('-Infinity') + -inf If the argument is a string, it should contain a decimal number, optionally preceded by a sign, and optionally embedded in whitespace. The optional sign may be ``'+'`` or ``'-'``; a ``'+'`` sign has no effect on the value produced. The argument may also be a string representing a NaN - (not-a-number), or positive or negative infinity. More precisely, the - input must conform to the ``floatvalue`` production rule in the following - grammar, after leading and trailing whitespace characters are removed: + (not-a-number), or positive or negative infinity. + More precisely, the input must conform to the :token:`~float:floatvalue` + production rule in the following grammar, after leading and trailing + whitespace characters are removed: .. productionlist:: float sign: "+" | "-" @@ -688,9 +767,10 @@ are always available. They are listed here in alphabetical order. digit: digitpart: `digit` (["_"] `digit`)* number: [`digitpart`] "." `digitpart` | `digitpart` ["."] - exponent: ("e" | "E") ["+" | "-"] `digitpart` - floatnumber: number [`exponent`] - floatvalue: [`sign`] (`floatnumber` | `infinity` | `nan`) + exponent: ("e" | "E") [`sign`] `digitpart` + floatnumber: `number` [`exponent`] + absfloatvalue: `floatnumber` | `infinity` | `nan` + floatvalue: [`sign`] `absfloatvalue` Case is not significant, so, for example, "inf", "Inf", "INFINITY", and "iNfINity" are all acceptable spellings for positive infinity. @@ -706,26 +786,13 @@ are always available. They are listed here in alphabetical order. If no argument is given, ``0.0`` is returned. - Examples:: - - >>> float('+1.23') - 1.23 - >>> float(' -12345\n') - -12345.0 - >>> float('1e-003') - 0.001 - >>> float('+1E6') - 1000000.0 - >>> float('-Infinity') - -inf - The float type is described in :ref:`typesnumeric`. .. versionchanged:: 3.6 Grouping digits with underscores as in code literals is allowed. .. versionchanged:: 3.7 - *x* is now a positional-only parameter. + The parameter is now positional-only. .. versionchanged:: 3.8 Falls back to :meth:`~object.__index__` if :meth:`~object.__float__` is not defined. @@ -909,17 +976,36 @@ are always available. They are listed here in alphabetical order. with the result after successfully reading input. -.. class:: int(x=0) - int(x, base=10) +.. class:: int(number=0, /) + int(string, /, base=10) + + Return an integer object constructed from a number or a string, or return + ``0`` if no arguments are given. + + Examples: - Return an integer object constructed from a number or string *x*, or return - ``0`` if no arguments are given. If *x* defines :meth:`~object.__int__`, - ``int(x)`` returns ``x.__int__()``. If *x* defines :meth:`~object.__index__`, - it returns ``x.__index__()``. If *x* defines :meth:`~object.__trunc__`, + .. doctest:: + + >>> int(123.45) + 123 + >>> int('123') + 123 + >>> int(' -12_345\n') + -12345 + >>> int('FACE', 16) + 64206 + >>> int('0xface', 0) + 64206 + >>> int('01110011', base=2) + 115 + + If the argument defines :meth:`~object.__int__`, + ``int(x)`` returns ``x.__int__()``. 
If the argument defines :meth:`~object.__index__`, + it returns ``x.__index__()``. If the argument defines :meth:`~object.__trunc__`, it returns ``x.__trunc__()``. For floating point numbers, this truncates towards zero. - If *x* is not a number or if *base* is given, then *x* must be a string, + If the argument is not a number or if *base* is given, then it must be a string, :class:`bytes`, or :class:`bytearray` instance representing an integer in radix *base*. Optionally, the string can be preceded by ``+`` or ``-`` (with no space in between), have leading zeros, be surrounded by whitespace, @@ -949,7 +1035,7 @@ are always available. They are listed here in alphabetical order. Grouping digits with underscores as in code literals is allowed. .. versionchanged:: 3.7 - *x* is now a positional-only parameter. + The first parameter is now positional-only. .. versionchanged:: 3.8 Falls back to :meth:`~object.__index__` if :meth:`~object.__int__` is not defined. @@ -960,7 +1046,7 @@ are always available. They are listed here in alphabetical order. .. versionchanged:: 3.11 :class:`int` string inputs and string representations can be limited to help avoid denial of service attacks. A :exc:`ValueError` is raised when - the limit is exceeded while converting a string *x* to an :class:`int` or + the limit is exceeded while converting a string to an :class:`int` or when converting an :class:`int` into a string would exceed the limit. See the :ref:`integer string conversion length limitation ` documentation. @@ -1052,39 +1138,51 @@ are always available. They are listed here in alphabetical order. variable names as the keys, and their currently bound references as the values. - At module scope, as well as when using ``exec()`` or ``eval()`` with a - single namespace, this function returns the same namespace as ``globals()``. + At module scope, as well as when using :func:`exec` or :func:`eval` with + a single namespace, this function returns the same namespace as + :func:`globals`. At class scope, it returns the namespace that will be passed to the metaclass constructor. When using ``exec()`` or ``eval()`` with separate local and global - namespaces, it returns the local namespace passed in to the function call. + arguments, it returns the local namespace passed in to the function call. In all of the above cases, each call to ``locals()`` in a given frame of execution will return the *same* mapping object. Changes made through - the mapping object returned from ``locals()`` will be visible as bound, - rebound, or deleted local variables, and binding, rebinding, or deleting - local variables will immediately affect the contents of the returned mapping - object. - - At function scope (including for generators and coroutines), each call to - ``locals()`` instead returns a fresh dictionary containing the current - bindings of the function's local variables and any nonlocal cell references. - In this case, name binding changes made via the returned dict are *not* - written back to the corresponding local variables or nonlocal cell - references, and binding, rebinding, or deleting local variables and nonlocal - cell references does *not* affect the contents of previously returned - dictionaries. + the mapping object returned from ``locals()`` will be visible as assigned, + reassigned, or deleted local variables, and assigning, reassigning, or + deleting local variables will immediately affect the contents of the + returned mapping object. 
+ + In an :term:`optimized scope` (including functions, generators, and + coroutines), each call to ``locals()`` instead returns a fresh dictionary + containing the current bindings of the function's local variables and any + nonlocal cell references. In this case, name binding changes made via the + returned dict are *not* written back to the corresponding local variables + or nonlocal cell references, and assigning, reassigning, or deleting local + variables and nonlocal cell references does *not* affect the contents + of previously returned dictionaries. + + Calling ``locals()`` as part of a comprehension in a function, generator, or + coroutine is equivalent to calling it in the containing scope, except that + the comprehension's initialised iteration variables will be included. In + other scopes, it behaves as if the comprehension were running as a nested + function. + + Calling ``locals()`` as part of a generator expression is equivalent to + calling it in a nested generator function. + + .. versionchanged:: 3.12 + The behaviour of ``locals()`` in a comprehension has been updated as + described in :pep:`709`. .. versionchanged:: 3.13 - In previous versions, the semantics of mutating the mapping object - returned from this function were formally undefined. In CPython - specifically, the mapping returned at function scope could be - implicitly refreshed by other operations, such as calling ``locals()`` - again. Obtaining the legacy CPython behaviour now requires explicit - calls to update the initially returned dictionary with the results - of subsequent calls to ``locals()``. + As part of :pep:`667`, the semantics of mutating the mapping objects + returned from this function are now defined. The behavior in + :term:`optimized scopes ` is now as described above. + Aside from being defined, the behaviour in other scopes remains + unchanged from previous versions. .. function:: map(function, iterable, *iterables) @@ -1971,14 +2069,18 @@ are always available. They are listed here in alphabetical order. :attr:`~object.__dict__` attributes (for example, classes use a :class:`types.MappingProxyType` to prevent direct dictionary updates). - Without an argument, :func:`vars` acts like :func:`locals`. Note, the - locals dictionary is only useful for reads since updates to the locals - dictionary are ignored. + Without an argument, :func:`vars` acts like :func:`locals`. A :exc:`TypeError` exception is raised if an object is specified but it doesn't have a :attr:`~object.__dict__` attribute (for example, if its class defines the :attr:`~object.__slots__` attribute). + .. versionchanged:: 3.13 + + The result of calling this function without an argument has been + updated as described for the :func:`locals` builtin. + + .. function:: zip(*iterables, strict=False) Iterate over several iterables in parallel, producing tuples with an item diff --git a/Doc/library/functools.rst b/Doc/library/functools.rst index 82c970d25a7aac..9d5c72802a21f2 100644 --- a/Doc/library/functools.rst +++ b/Doc/library/functools.rst @@ -1,5 +1,5 @@ -:mod:`functools` --- Higher-order functions and operations on callable objects -============================================================================== +:mod:`!functools` --- Higher-order functions and operations on callable objects +=============================================================================== .. module:: functools :synopsis: Higher-order functions and operations on callable objects. 
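A small sketch of the snapshot behaviour that the :func:`locals` entry above describes for optimized scopes; the function and values are arbitrary, and the comments reflect the Python 3.13 semantics documented in that hunk::

    def demo():
        x = 1
        snapshot = locals()      # fresh dict of the current local bindings
        snapshot["x"] = 99       # mutating the snapshot...
        print(x)                 # ...does not rebind the real local: prints 1

        x = 2
        print(snapshot["x"])     # the earlier snapshot still holds 99
        print(locals()["x"])     # a new call reflects the current value: 2

    demo()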
@@ -325,7 +325,7 @@ The :mod:`functools` module defines the following functions: .. versionadded:: 3.2 .. versionchanged:: 3.4 - Returning NotImplemented from the underlying comparison function for + Returning ``NotImplemented`` from the underlying comparison function for unrecognised types is now supported. .. function:: partial(func, /, *args, **keywords) @@ -646,8 +646,9 @@ The :mod:`functools` module defines the following functions: attributes of the wrapper function are updated with the corresponding attributes from the original function. The default values for these arguments are the module level constants ``WRAPPER_ASSIGNMENTS`` (which assigns to the wrapper - function's ``__module__``, ``__name__``, ``__qualname__``, ``__annotations__`` - and ``__doc__``, the documentation string) and ``WRAPPER_UPDATES`` (which + function's ``__module__``, ``__name__``, ``__qualname__``, ``__annotations__``, + ``__type_params__``, and ``__doc__``, the documentation string) + and ``WRAPPER_UPDATES`` (which updates the wrapper function's ``__dict__``, i.e. the instance dictionary). To allow access to the original function for introspection and other purposes @@ -677,6 +678,9 @@ The :mod:`functools` module defines the following functions: function, even if that function defined a ``__wrapped__`` attribute. (see :issue:`17482`) + .. versionchanged:: 3.12 + The ``__type_params__`` attribute is now copied by default. + .. decorator:: wraps(wrapped, assigned=WRAPPER_ASSIGNMENTS, updated=WRAPPER_UPDATES) diff --git a/Doc/library/gc.rst b/Doc/library/gc.rst index e36a71af2b64ab..790dfdfd00b196 100644 --- a/Doc/library/gc.rst +++ b/Doc/library/gc.rst @@ -1,5 +1,5 @@ -:mod:`gc` --- Garbage Collector interface -========================================= +:mod:`!gc` --- Garbage Collector interface +========================================== .. module:: gc :synopsis: Interface to the cycle-detecting garbage collector. @@ -69,7 +69,7 @@ The :mod:`gc` module provides the following functions: .. function:: get_objects(generation=None) Returns a list of all objects tracked by the collector, excluding the list - returned. If *generation* is not None, return only the objects tracked by + returned. If *generation* is not ``None``, return only the objects tracked by the collector that are in that generation. .. versionchanged:: 3.8 diff --git a/Doc/library/getopt.rst b/Doc/library/getopt.rst index ada68b240143e8..d43d3250732306 100644 --- a/Doc/library/getopt.rst +++ b/Doc/library/getopt.rst @@ -1,5 +1,5 @@ -:mod:`getopt` --- C-style parser for command line options -========================================================= +:mod:`!getopt` --- C-style parser for command line options +========================================================== .. module:: getopt :synopsis: Portable parser for command line options; support both short and diff --git a/Doc/library/getpass.rst b/Doc/library/getpass.rst index 54c84d45a59856..9d67250033df81 100644 --- a/Doc/library/getpass.rst +++ b/Doc/library/getpass.rst @@ -1,5 +1,5 @@ -:mod:`getpass` --- Portable password input -========================================== +:mod:`!getpass` --- Portable password input +=========================================== .. module:: getpass :synopsis: Portable reading of passwords and retrieval of the userid. 
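A quick sketch of the attribute copying noted in the ``functools.wraps`` hunk above; ``__type_params__`` and the ``def first[T](...)`` syntax require Python 3.12+, and the decorator name is made up::

    import functools

    def log_calls(func):
        # wraps() copies __module__, __name__, __qualname__, __annotations__,
        # __type_params__ and __doc__ from func, and updates wrapper.__dict__.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            print(f"calling {func.__name__}")
            return func(*args, **kwargs)
        return wrapper

    @log_calls
    def first[T](items: list[T]) -> T:
        """Return the first item."""
        return items[0]

    print(first.__name__)          # first
    print(first.__doc__)           # Return the first item.
    print(first.__type_params__)   # (T,)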
diff --git a/Doc/library/gettext.rst b/Doc/library/gettext.rst index 41beac3e0c7396..d0de83907eb297 100644 --- a/Doc/library/gettext.rst +++ b/Doc/library/gettext.rst @@ -1,5 +1,5 @@ -:mod:`gettext` --- Multilingual internationalization services -============================================================= +:mod:`!gettext` --- Multilingual internationalization services +============================================================== .. module:: gettext :synopsis: Multilingual internationalization services. diff --git a/Doc/library/glob.rst b/Doc/library/glob.rst index ab6da98bc74ad2..684466d354aef8 100644 --- a/Doc/library/glob.rst +++ b/Doc/library/glob.rst @@ -1,5 +1,5 @@ -:mod:`glob` --- Unix style pathname pattern expansion -===================================================== +:mod:`!glob` --- Unix style pathname pattern expansion +====================================================== .. module:: glob :synopsis: Unix shell style pathname pattern expansion. diff --git a/Doc/library/graphlib.rst b/Doc/library/graphlib.rst index 5414d6370b78ce..a0b16576fad219 100644 --- a/Doc/library/graphlib.rst +++ b/Doc/library/graphlib.rst @@ -1,5 +1,5 @@ -:mod:`graphlib` --- Functionality to operate with graph-like structures -========================================================================= +:mod:`!graphlib` --- Functionality to operate with graph-like structures +======================================================================== .. module:: graphlib :synopsis: Functionality to operate with graph-like structures diff --git a/Doc/library/grp.rst b/Doc/library/grp.rst index 9cf25b7ae137a3..30caea328baa79 100644 --- a/Doc/library/grp.rst +++ b/Doc/library/grp.rst @@ -1,5 +1,5 @@ -:mod:`grp` --- The group database -================================= +:mod:`!grp` --- The group database +================================== .. module:: grp :platform: Unix diff --git a/Doc/library/gzip.rst b/Doc/library/gzip.rst index e6106e72a83d25..965da5981f6dbc 100644 --- a/Doc/library/gzip.rst +++ b/Doc/library/gzip.rst @@ -1,5 +1,5 @@ -:mod:`gzip` --- Support for :program:`gzip` files -================================================= +:mod:`!gzip` --- Support for :program:`gzip` files +================================================== .. module:: gzip :synopsis: Interfaces for gzip compression and decompression using file objects. @@ -102,7 +102,7 @@ The module defines the following items: The optional *mtime* argument is the timestamp requested by gzip. The time is in Unix format, i.e., seconds since 00:00:00 UTC, January 1, 1970. - If *mtime* is omitted or None, the current time is used. Use *mtime* = 0 + If *mtime* is omitted or ``None``, the current time is used. Use *mtime* = 0 to generate a compressed stream that does not depend on creation time. See below for the :attr:`mtime` attribute that is set when decompressing. diff --git a/Doc/library/hashlib.rst b/Doc/library/hashlib.rst index aa0c6fc503e8ff..5d24b77e13bfce 100644 --- a/Doc/library/hashlib.rst +++ b/Doc/library/hashlib.rst @@ -1,5 +1,5 @@ -:mod:`hashlib` --- Secure hashes and message digests -==================================================== +:mod:`!hashlib` --- Secure hashes and message digests +===================================================== .. module:: hashlib :synopsis: Secure hash and message digest algorithms. @@ -328,7 +328,7 @@ include a `salt `_. your application, read *Appendix A.2.2* of NIST-SP-800-132_. The answers on the `stackexchange pbkdf2 iterations question`_ explain in detail. 
- *dklen* is the length of the derived key. If *dklen* is ``None`` then the + *dklen* is the length of the derived key in bytes. If *dklen* is ``None`` then the digest size of the hash algorithm *hash_name* is used, e.g. 64 for SHA-512. >>> from hashlib import pbkdf2_hmac @@ -357,7 +357,7 @@ include a `salt `_. *n* is the CPU/Memory cost factor, *r* the block size, *p* parallelization factor and *maxmem* limits memory (OpenSSL 1.1.0 defaults to 32 MiB). - *dklen* is the length of the derived key. + *dklen* is the length of the derived key in bytes. .. versionadded:: 3.6 diff --git a/Doc/library/heapq.rst b/Doc/library/heapq.rst index ad407141a2f590..d3c4b920ba500a 100644 --- a/Doc/library/heapq.rst +++ b/Doc/library/heapq.rst @@ -1,5 +1,5 @@ -:mod:`heapq` --- Heap queue algorithm -===================================== +:mod:`!heapq` --- Heap queue algorithm +====================================== .. module:: heapq :synopsis: Heap queue algorithm (a.k.a. priority queue). diff --git a/Doc/library/hmac.rst b/Doc/library/hmac.rst index 43012e03c580e8..d6692033b2d4c3 100644 --- a/Doc/library/hmac.rst +++ b/Doc/library/hmac.rst @@ -1,5 +1,5 @@ -:mod:`hmac` --- Keyed-Hashing for Message Authentication -======================================================== +:mod:`!hmac` --- Keyed-Hashing for Message Authentication +========================================================= .. module:: hmac :synopsis: Keyed-Hashing for Message Authentication (HMAC) implementation diff --git a/Doc/library/html.entities.rst b/Doc/library/html.entities.rst index 10529561a92cd0..add18e4c87d220 100644 --- a/Doc/library/html.entities.rst +++ b/Doc/library/html.entities.rst @@ -1,5 +1,5 @@ -:mod:`html.entities` --- Definitions of HTML general entities -============================================================= +:mod:`!html.entities` --- Definitions of HTML general entities +============================================================== .. module:: html.entities :synopsis: Definitions of HTML general entities. diff --git a/Doc/library/html.parser.rst b/Doc/library/html.parser.rst index d35090111e0822..6d433b5a04fc4a 100644 --- a/Doc/library/html.parser.rst +++ b/Doc/library/html.parser.rst @@ -1,5 +1,5 @@ -:mod:`html.parser` --- Simple HTML and XHTML parser -=================================================== +:mod:`!html.parser` --- Simple HTML and XHTML parser +==================================================== .. module:: html.parser :synopsis: A simple parser that can handle HTML and XHTML. diff --git a/Doc/library/html.rst b/Doc/library/html.rst index c2b01e14ea7555..9aa39ba9a42b0f 100644 --- a/Doc/library/html.rst +++ b/Doc/library/html.rst @@ -1,5 +1,5 @@ -:mod:`html` --- HyperText Markup Language support -================================================= +:mod:`!html` --- HyperText Markup Language support +================================================== .. module:: html :synopsis: Helpers for manipulating HTML. diff --git a/Doc/library/http.client.rst b/Doc/library/http.client.rst index 7e4502064f22a1..2835c8d0eb711e 100644 --- a/Doc/library/http.client.rst +++ b/Doc/library/http.client.rst @@ -1,5 +1,5 @@ -:mod:`http.client` --- HTTP protocol client -=========================================== +:mod:`!http.client` --- HTTP protocol client +============================================ .. module:: http.client :synopsis: HTTP and HTTPS protocol client (requires sockets). 
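A short sketch of the *dklen* behaviour clarified in the ``hashlib`` hunk above; the salt and iteration count below are placeholders, not recommendations::

    import hashlib
    import os

    password = b"correct horse battery staple"
    salt = os.urandom(16)

    # dklen omitted (None): the derived key is as long as the hash digest,
    # i.e. 32 bytes for SHA-256.
    dk_default = hashlib.pbkdf2_hmac("sha256", password, salt, 600_000)
    print(len(dk_default))   # 32

    # dklen given: the length of the derived key, in bytes.
    dk_short = hashlib.pbkdf2_hmac("sha256", password, salt, 600_000, dklen=16)
    print(len(dk_short))     # 16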
diff --git a/Doc/library/http.cookiejar.rst b/Doc/library/http.cookiejar.rst index 2fe188be641c2d..31ac8bafb6ab4b 100644 --- a/Doc/library/http.cookiejar.rst +++ b/Doc/library/http.cookiejar.rst @@ -1,5 +1,5 @@ -:mod:`http.cookiejar` --- Cookie handling for HTTP clients -========================================================== +:mod:`!http.cookiejar` --- Cookie handling for HTTP clients +=========================================================== .. module:: http.cookiejar :synopsis: Classes for automatic handling of HTTP cookies. diff --git a/Doc/library/http.cookies.rst b/Doc/library/http.cookies.rst index e91972fe621a48..4ce2e3c4f4cb42 100644 --- a/Doc/library/http.cookies.rst +++ b/Doc/library/http.cookies.rst @@ -1,5 +1,5 @@ -:mod:`http.cookies` --- HTTP state management -============================================= +:mod:`!http.cookies` --- HTTP state management +============================================== .. module:: http.cookies :synopsis: Support for HTTP state management (cookies). diff --git a/Doc/library/http.rst b/Doc/library/http.rst index 998d6e73f9dd82..ce3fb9f8120502 100644 --- a/Doc/library/http.rst +++ b/Doc/library/http.rst @@ -1,5 +1,5 @@ -:mod:`http` --- HTTP modules -============================ +:mod:`!http` --- HTTP modules +============================= .. module:: http :synopsis: HTTP status codes and messages diff --git a/Doc/library/http.server.rst b/Doc/library/http.server.rst index 886e359bd8cd62..3c80fa747d5f1f 100644 --- a/Doc/library/http.server.rst +++ b/Doc/library/http.server.rst @@ -1,5 +1,5 @@ -:mod:`http.server` --- HTTP servers -=================================== +:mod:`!http.server` --- HTTP servers +==================================== .. module:: http.server :synopsis: HTTP server and request handlers. diff --git a/Doc/library/idle.rst b/Doc/library/idle.rst index 17a5144b4c0635..59b181aab3e484 100644 --- a/Doc/library/idle.rst +++ b/Doc/library/idle.rst @@ -429,7 +429,7 @@ Several non-character keys move the cursor and possibly delete characters. Deletion does not puts text on the clipboard, but IDLE has an undo list. Wherever this doc discusses keys, 'C' refers to the :kbd:`Control` key on Windows and -Unix and the :kbd:`Command` key on macOS. (And all such dicussions +Unix and the :kbd:`Command` key on macOS. (And all such discussions assume that the keys have not been re-bound to something else.) * Arrow keys move the cursor one character or line. diff --git a/Doc/library/imaplib.rst b/Doc/library/imaplib.rst index ccfd0cd3dde109..a2dad58b00b9fa 100644 --- a/Doc/library/imaplib.rst +++ b/Doc/library/imaplib.rst @@ -1,5 +1,5 @@ -:mod:`imaplib` --- IMAP4 protocol client -======================================== +:mod:`!imaplib` --- IMAP4 protocol client +========================================= .. module:: imaplib :synopsis: IMAP4 protocol client (requires sockets). @@ -39,7 +39,7 @@ base class: initialized. If *host* is not specified, ``''`` (the local host) is used. If *port* is omitted, the standard IMAP4 port (143) is used. The optional *timeout* parameter specifies a timeout in seconds for the connection attempt. - If timeout is not given or is None, the global default socket timeout is used. + If timeout is not given or is ``None``, the global default socket timeout is used. The :class:`IMAP4` class supports the :keyword:`with` statement. When used like this, the IMAP4 ``LOGOUT`` command is issued automatically when the @@ -97,7 +97,7 @@ There's also a subclass for secure connections: best practices. 
The optional *timeout* parameter specifies a timeout in seconds for the - connection attempt. If timeout is not given or is None, the global default + connection attempt. If timeout is not given or is ``None``, the global default socket timeout is used. .. versionchanged:: 3.3 @@ -360,7 +360,7 @@ An :class:`IMAP4` instance has the following methods: Opens socket to *port* at *host*. The optional *timeout* parameter specifies a timeout in seconds for the connection attempt. - If timeout is not given or is None, the global default socket timeout + If timeout is not given or is ``None``, the global default socket timeout is used. Also note that if the *timeout* parameter is set to be zero, it will raise a :class:`ValueError` to reject creating a non-blocking socket. This method is implicitly called by the :class:`IMAP4` constructor. diff --git a/Doc/library/importlib.metadata.rst b/Doc/library/importlib.metadata.rst index 674ce5807fdf11..9c0879f5ca850f 100644 --- a/Doc/library/importlib.metadata.rst +++ b/Doc/library/importlib.metadata.rst @@ -343,7 +343,7 @@ instance:: >>> dist.metadata['License'] # doctest: +SKIP 'MIT' -For editable packages, an origin property may present :pep:`610` +For editable packages, an ``origin`` property may present :pep:`610` metadata:: >>> dist.origin.url diff --git a/Doc/library/importlib.resources.abc.rst b/Doc/library/importlib.resources.abc.rst index c25c48530722e6..5ea8044e1ec6ca 100644 --- a/Doc/library/importlib.resources.abc.rst +++ b/Doc/library/importlib.resources.abc.rst @@ -1,5 +1,5 @@ -:mod:`importlib.resources.abc` -- Abstract base classes for resources ---------------------------------------------------------------------- +:mod:`!importlib.resources.abc` -- Abstract base classes for resources +---------------------------------------------------------------------- .. module:: importlib.resources.abc :synopsis: Abstract base classes for resources @@ -103,11 +103,11 @@ .. abstractmethod:: is_dir() - Return True if self is a directory. + Return ``True`` if self is a directory. .. abstractmethod:: is_file() - Return True if self is a file. + Return ``True`` if self is a file. .. abstractmethod:: joinpath(*pathsegments) diff --git a/Doc/library/importlib.resources.rst b/Doc/library/importlib.resources.rst index 9a5e4c76e7bd8f..e002198899c8b8 100644 --- a/Doc/library/importlib.resources.rst +++ b/Doc/library/importlib.resources.rst @@ -1,5 +1,5 @@ -:mod:`importlib.resources` -- Package resource reading, opening and access --------------------------------------------------------------------------- +:mod:`!importlib.resources` -- Package resource reading, opening and access +--------------------------------------------------------------------------- .. module:: importlib.resources :synopsis: Package resource reading, opening, and access diff --git a/Doc/library/inspect.rst b/Doc/library/inspect.rst index 4a0a090facb8bb..7130faa4b5b696 100644 --- a/Doc/library/inspect.rst +++ b/Doc/library/inspect.rst @@ -1,5 +1,5 @@ -:mod:`inspect` --- Inspect live objects -======================================= +:mod:`!inspect` --- Inspect live objects +======================================== .. 
testsetup:: * diff --git a/Doc/library/io.rst b/Doc/library/io.rst index 8eb531aa4ea248..748c49968f505c 100644 --- a/Doc/library/io.rst +++ b/Doc/library/io.rst @@ -1,5 +1,5 @@ -:mod:`io` --- Core tools for working with streams -================================================= +:mod:`!io` --- Core tools for working with streams +================================================== .. module:: io :synopsis: Core tools for working with streams. diff --git a/Doc/library/ipaddress.rst b/Doc/library/ipaddress.rst index a4073a4dac86b9..d7dccf1a86593d 100644 --- a/Doc/library/ipaddress.rst +++ b/Doc/library/ipaddress.rst @@ -1,5 +1,5 @@ -:mod:`ipaddress` --- IPv4/IPv6 manipulation library -=================================================== +:mod:`!ipaddress` --- IPv4/IPv6 manipulation library +==================================================== .. module:: ipaddress :synopsis: IPv4/IPv6 manipulation library. diff --git a/Doc/library/itertools.rst b/Doc/library/itertools.rst index fb3f33370e3faa..3dc3f60923a0ba 100644 --- a/Doc/library/itertools.rst +++ b/Doc/library/itertools.rst @@ -1,5 +1,5 @@ -:mod:`itertools` --- Functions creating iterators for efficient looping -======================================================================= +:mod:`!itertools` --- Functions creating iterators for efficient looping +======================================================================== .. module:: itertools :synopsis: Functions creating iterators for efficient looping. @@ -56,13 +56,13 @@ Iterator Arguments Results :func:`chain` p, q, ... p0, p1, ... plast, q0, q1, ... ``chain('ABC', 'DEF') → A B C D E F`` :func:`chain.from_iterable` iterable p0, p1, ... plast, q0, q1, ... ``chain.from_iterable(['ABC', 'DEF']) → A B C D E F`` :func:`compress` data, selectors (d[0] if s[0]), (d[1] if s[1]), ... ``compress('ABCDEF', [1,0,1,0,1,1]) → A C E F`` -:func:`dropwhile` predicate, seq seq[n], seq[n+1], starting when predicate fails ``dropwhile(lambda x: x<5, [1,4,6,4,1]) → 6 4 1`` -:func:`filterfalse` predicate, seq elements of seq where predicate(elem) fails ``filterfalse(lambda x: x%2, range(10)) → 0 2 4 6 8`` +:func:`dropwhile` predicate, seq seq[n], seq[n+1], starting when predicate fails ``dropwhile(lambda x: x<5, [1,4,6,3,8]) → 6 3 8`` +:func:`filterfalse` predicate, seq elements of seq where predicate(elem) fails ``filterfalse(lambda x: x<5, [1,4,6,3,8]) → 6 8`` :func:`groupby` iterable[, key] sub-iterators grouped by value of key(v) :func:`islice` seq, [start,] stop [, step] elements from seq[start:stop:step] ``islice('ABCDEFG', 2, None) → C D E F G`` :func:`pairwise` iterable (p[0], p[1]), (p[1], p[2]) ``pairwise('ABCDEFG') → AB BC CD DE EF FG`` :func:`starmap` func, seq func(\*seq[0]), func(\*seq[1]), ... ``starmap(pow, [(2,5), (3,2), (10,3)]) → 32 9 1000`` -:func:`takewhile` predicate, seq seq[0], seq[1], until predicate fails ``takewhile(lambda x: x<5, [1,4,6,4,1]) → 1 4`` +:func:`takewhile` predicate, seq seq[0], seq[1], until predicate fails ``takewhile(lambda x: x<5, [1,4,6,3,8]) → 1 4`` :func:`tee` it, n it1, it2, ... itn splits one iterator into n :func:`zip_longest` p, q, ... (p[0], q[0]), (p[1], q[1]), ... ``zip_longest('ABCD', 'xy', fillvalue='-') → Ax By C- D-`` ============================ ============================ ================================================= ============================================================= @@ -97,59 +97,57 @@ The following module functions all construct and return iterators. 
Some provide streams of infinite length, so they should only be accessed by functions or loops that truncate the stream. -.. function:: accumulate(iterable[, func, *, initial=None]) - Make an iterator that returns accumulated sums, or accumulated - results of other binary functions (specified via the optional - *func* argument). +.. function:: accumulate(iterable[, function, *, initial=None]) - If *func* is supplied, it should be a function - of two arguments. Elements of the input *iterable* may be any type - that can be accepted as arguments to *func*. (For example, with - the default operation of addition, elements may be any addable - type including :class:`~decimal.Decimal` or - :class:`~fractions.Fraction`.) + Make an iterator that returns accumulated sums or accumulated + results from other binary functions. - Usually, the number of elements output matches the input iterable. - However, if the keyword argument *initial* is provided, the - accumulation leads off with the *initial* value so that the output - has one more element than the input iterable. + The *function* defaults to addition. The *function* should accept + two arguments, an accumulated total and a value from the *iterable*. + + If an *initial* value is provided, the accumulation will start with + that value and the output will have one more element than the input + iterable. Roughly equivalent to:: - def accumulate(iterable, func=operator.add, *, initial=None): + def accumulate(iterable, function=operator.add, *, initial=None): 'Return running totals' # accumulate([1,2,3,4,5]) → 1 3 6 10 15 # accumulate([1,2,3,4,5], initial=100) → 100 101 103 106 110 115 # accumulate([1,2,3,4,5], operator.mul) → 1 2 6 24 120 - it = iter(iterable) + + iterator = iter(iterable) total = initial if initial is None: try: - total = next(it) + total = next(iterator) except StopIteration: return + yield total - for element in it: - total = func(total, element) + for element in iterator: + total = function(total, element) yield total - There are a number of uses for the *func* argument. It can be set to - :func:`min` for a running minimum, :func:`max` for a running maximum, or - :func:`operator.mul` for a running product. Amortization tables can be - built by accumulating interest and applying payments: + The *function* argument can be set to :func:`min` for a running + minimum, :func:`max` for a running maximum, or :func:`operator.mul` + for a running product. `Amortization tables + `_ + can be built by accumulating interest and applying payments: .. doctest:: >>> data = [3, 4, 6, 2, 1, 9, 0, 7, 5, 8] - >>> list(accumulate(data, operator.mul)) # running product - [3, 12, 72, 144, 144, 1296, 0, 0, 0, 0] >>> list(accumulate(data, max)) # running maximum [3, 4, 6, 6, 6, 9, 9, 9, 9, 9] + >>> list(accumulate(data, operator.mul)) # running product + [3, 12, 72, 144, 144, 1296, 0, 0, 0, 0] # Amortize a 5% loan of 1000 with 10 annual payments of 90 - >>> account_update = lambda bal, pmt: round(bal * 1.05) + pmt - >>> list(accumulate(repeat(-90, 10), account_update, initial=1_000)) + >>> update = lambda balance, payment: round(balance * 1.05) - payment + >>> list(accumulate(repeat(90, 10), update, initial=1_000)) [1000, 960, 918, 874, 828, 779, 728, 674, 618, 559, 497] See :func:`functools.reduce` for a similar function that returns only the @@ -158,7 +156,7 @@ loops that truncate the stream. .. versionadded:: 3.2 .. versionchanged:: 3.3 - Added the optional *func* parameter. + Added the optional *function* parameter. .. 
versionchanged:: 3.8 Added the optional *initial* parameter. @@ -184,21 +182,14 @@ loops that truncate the stream. >>> unflattened [('roses', 'red'), ('violets', 'blue'), ('sugar', 'sweet')] - >>> for batch in batched('ABCDEFG', 3): - ... print(batch) - ... - ('A', 'B', 'C') - ('D', 'E', 'F') - ('G',) - Roughly equivalent to:: def batched(iterable, n, *, strict=False): # batched('ABCDEFG', 3) → ABC DEF G if n < 1: raise ValueError('n must be at least one') - it = iter(iterable) - while batch := tuple(islice(it, n)): + iterator = iter(iterable) + while batch := tuple(islice(iterator, n)): if strict and len(batch) != n: raise ValueError('batched(): incomplete batch') yield batch @@ -218,9 +209,8 @@ loops that truncate the stream. def chain(*iterables): # chain('ABC', 'DEF') → A B C D E F - for it in iterables: - for element in it: - yield element + for iterable in iterables: + yield from iterable .. classmethod:: chain.from_iterable(iterable) @@ -230,33 +220,39 @@ loops that truncate the stream. def from_iterable(iterables): # chain.from_iterable(['ABC', 'DEF']) → A B C D E F - for it in iterables: - for element in it: - yield element + for iterable in iterables: + yield from iterable .. function:: combinations(iterable, r) Return *r* length subsequences of elements from the input *iterable*. - The combination tuples are emitted in lexicographic ordering according to - the order of the input *iterable*. So, if the input *iterable* is sorted, + The output is a subsequence of :func:`product` keeping only entries that + are subsequences of the *iterable*. The length of the output is given + by :func:`math.comb` which computes ``n! / r! / (n - r)!`` when ``0 ≤ r + ≤ n`` or zero when ``r > n``. + + The combination tuples are emitted in lexicographic order according to + the order of the input *iterable*. If the input *iterable* is sorted, the output tuples will be produced in sorted order. Elements are treated as unique based on their position, not on their - value. So if the input elements are unique, there will be no repeated - values in each combination. + value. If the input elements are unique, there will be no repeated + values within each combination. Roughly equivalent to:: def combinations(iterable, r): # combinations('ABCD', 2) → AB AC AD BC BD CD # combinations(range(4), 3) → 012 013 023 123 + pool = tuple(iterable) n = len(pool) if r > n: return indices = list(range(r)) + yield tuple(pool[i] for i in indices) while True: for i in reversed(range(r)): @@ -269,42 +265,36 @@ loops that truncate the stream. indices[j] = indices[j-1] + 1 yield tuple(pool[i] for i in indices) - The code for :func:`combinations` can be also expressed as a subsequence - of :func:`permutations` after filtering entries where the elements are not - in sorted order (according to their position in the input pool):: - - def combinations(iterable, r): - pool = tuple(iterable) - n = len(pool) - for indices in permutations(range(n), r): - if sorted(indices) == list(indices): - yield tuple(pool[i] for i in indices) - - The number of items returned is ``n! / r! / (n-r)!`` when ``0 <= r <= n`` - or zero when ``r > n``. .. function:: combinations_with_replacement(iterable, r) Return *r* length subsequences of elements from the input *iterable* allowing individual elements to be repeated more than once. - The combination tuples are emitted in lexicographic ordering according to - the order of the input *iterable*. 
So, if the input *iterable* is sorted, + The output is a subsequence of :func:`product` that keeps only entries + that are subsequences (with possible repeated elements) of the + *iterable*. The number of subsequences returned is ``(n + r - 1)! / r! / + (n - 1)!`` when ``n > 0``. + + The combination tuples are emitted in lexicographic order according to + the order of the input *iterable*. If the input *iterable* is sorted, the output tuples will be produced in sorted order. Elements are treated as unique based on their position, not on their - value. So if the input elements are unique, the generated combinations + value. If the input elements are unique, the generated combinations will also be unique. Roughly equivalent to:: def combinations_with_replacement(iterable, r): # combinations_with_replacement('ABC', 2) → AA AB AC BB BC CC + pool = tuple(iterable) n = len(pool) if not n and r: return indices = [0] * r + yield tuple(pool[i] for i in indices) while True: for i in reversed(range(r)): @@ -315,41 +305,29 @@ loops that truncate the stream. indices[i:] = [indices[i] + 1] * (r - i) yield tuple(pool[i] for i in indices) - The code for :func:`combinations_with_replacement` can be also expressed as - a subsequence of :func:`product` after filtering entries where the elements - are not in sorted order (according to their position in the input pool):: - - def combinations_with_replacement(iterable, r): - pool = tuple(iterable) - n = len(pool) - for indices in product(range(n), repeat=r): - if sorted(indices) == list(indices): - yield tuple(pool[i] for i in indices) - - The number of items returned is ``(n+r-1)! / r! / (n-1)!`` when ``n > 0``. - .. versionadded:: 3.1 .. function:: compress(data, selectors) - Make an iterator that filters elements from *data* returning only those that - have a corresponding element in *selectors* that evaluates to ``True``. - Stops when either the *data* or *selectors* iterables has been exhausted. - Roughly equivalent to:: + Make an iterator that returns elements from *data* where the + corresponding element in *selectors* is true. Stops when either the + *data* or *selectors* iterables have been exhausted. Roughly + equivalent to:: def compress(data, selectors): # compress('ABCDEF', [1,0,1,0,1,1]) → A C E F - return (d for d, s in zip(data, selectors) if s) + return (datum for datum, selector in zip(data, selectors) if selector) .. versionadded:: 3.1 .. function:: count(start=0, step=1) - Make an iterator that returns evenly spaced values starting with number *start*. Often - used as an argument to :func:`map` to generate consecutive data points. - Also, used with :func:`zip` to add sequence numbers. Roughly equivalent to:: + Make an iterator that returns evenly spaced values beginning with + *start*. Can be used with :func:`map` to generate consecutive data + points or with :func:`zip` to add sequence numbers. Roughly + equivalent to:: def count(start=0, step=1): # count(10) → 10 11 12 13 14 ... @@ -366,11 +344,12 @@ loops that truncate the stream. .. versionchanged:: 3.1 Added *step* argument and allowed non-integer arguments. + .. function:: cycle(iterable) - Make an iterator returning elements from the iterable and saving a copy of each. - When the iterable is exhausted, return elements from the saved copy. Repeats - indefinitely. Roughly equivalent to:: + Make an iterator returning elements from the *iterable* and saving a + copy of each. When the iterable is exhausted, return elements from + the saved copy. Repeats indefinitely. 
Roughly equivalent to:: def cycle(iterable): # cycle('ABCD') → A B C D A B C D A B C D ... @@ -380,37 +359,43 @@ loops that truncate the stream. saved.append(element) while saved: for element in saved: - yield element + yield element - Note, this member of the toolkit may require significant auxiliary storage - (depending on the length of the iterable). + This itertool may require significant auxiliary storage (depending on + the length of the iterable). .. function:: dropwhile(predicate, iterable) - Make an iterator that drops elements from the iterable as long as the predicate - is true; afterwards, returns every element. Note, the iterator does not produce - *any* output until the predicate first becomes false, so it may have a lengthy - start-up time. Roughly equivalent to:: + Make an iterator that drops elements from the *iterable* while the + *predicate* is true and afterwards returns every element. Roughly + equivalent to:: def dropwhile(predicate, iterable): - # dropwhile(lambda x: x<5, [1,4,6,4,1]) → 6 4 1 - iterable = iter(iterable) - for x in iterable: + # dropwhile(lambda x: x<5, [1,4,6,3,8]) → 6 3 8 + + iterator = iter(iterable) + for x in iterator: if not predicate(x): yield x break - for x in iterable: + + for x in iterator: yield x + Note this does not produce *any* output until the predicate first + becomes false, so this itertool may have a lengthy start-up time. + + .. function:: filterfalse(predicate, iterable) - Make an iterator that filters elements from iterable returning only those for - which the predicate is false. If *predicate* is ``None``, return the items - that are false. Roughly equivalent to:: + Make an iterator that filters elements from the *iterable* returning + only those for which the *predicate* returns a false value. If + *predicate* is ``None``, returns the items that are false. Roughly + equivalent to:: def filterfalse(predicate, iterable): - # filterfalse(lambda x: x%2, range(10)) → 0 2 4 6 8 + # filterfalse(lambda x: x<5, [1,4,6,3,8]) → 6 8 if predicate is None: predicate = bool for x in iterable: @@ -446,55 +431,55 @@ loops that truncate the stream. 
:func:`groupby` is roughly equivalent to:: - class groupby: + def groupby(iterable, key=None): # [k for k, g in groupby('AAAABBBCCDAABBB')] → A B C D A B # [list(g) for k, g in groupby('AAAABBBCCD')] → AAAA BBB CC D - def __init__(self, iterable, key=None): - if key is None: - key = lambda x: x - self.keyfunc = key - self.it = iter(iterable) - self.tgtkey = self.currkey = self.currvalue = object() - - def __iter__(self): - return self - - def __next__(self): - self.id = object() - while self.currkey == self.tgtkey: - self.currvalue = next(self.it) # Exit on StopIteration - self.currkey = self.keyfunc(self.currvalue) - self.tgtkey = self.currkey - return (self.currkey, self._grouper(self.tgtkey, self.id)) - - def _grouper(self, tgtkey, id): - while self.id is id and self.currkey == tgtkey: - yield self.currvalue - try: - self.currvalue = next(self.it) - except StopIteration: + keyfunc = (lambda x: x) if key is None else key + iterator = iter(iterable) + exhausted = False + + def _grouper(target_key): + nonlocal curr_value, curr_key, exhausted + yield curr_value + for curr_value in iterator: + curr_key = keyfunc(curr_value) + if curr_key != target_key: return - self.currkey = self.keyfunc(self.currvalue) + yield curr_value + exhausted = True + + try: + curr_value = next(iterator) + except StopIteration: + return + curr_key = keyfunc(curr_value) + + while not exhausted: + target_key = curr_key + curr_group = _grouper(target_key) + yield curr_key, curr_group + if curr_key == target_key: + for _ in curr_group: + pass .. function:: islice(iterable, stop) islice(iterable, start, stop[, step]) - Make an iterator that returns selected elements from the iterable. If *start* is - non-zero, then elements from the iterable are skipped until start is reached. - Afterward, elements are returned consecutively unless *step* is set higher than - one which results in items being skipped. If *stop* is ``None``, then iteration - continues until the iterator is exhausted, if at all; otherwise, it stops at the - specified position. + Make an iterator that returns selected elements from the iterable. + Works like sequence slicing but does not support negative values for + *start*, *stop*, or *step*. + + If *start* is zero or ``None``, iteration starts at zero. Otherwise, + elements from the iterable are skipped until *start* is reached. - If *start* is ``None``, then iteration starts at zero. If *step* is ``None``, - then the step defaults to one. + If *stop* is ``None``, iteration continues until the iterator is + exhausted, if at all. Otherwise, it stops at the specified position. - Unlike regular slicing, :func:`islice` does not support negative values for - *start*, *stop*, or *step*. Can be used to extract related fields from - data where the internal structure has been flattened (for example, a - multi-line report may list a name field on every third line). + If *step* is ``None``, the step defaults to one. Elements are returned + consecutively unless *step* is set higher than one which results in + items being skipped. Roughly equivalent to:: @@ -503,13 +488,15 @@ loops that truncate the stream. 
# islice('ABCDEFG', 2, 4) → C D # islice('ABCDEFG', 2, None) → C D E F G # islice('ABCDEFG', 0, None, 2) → A C E G + s = slice(*args) start = 0 if s.start is None else s.start stop = s.stop step = 1 if s.step is None else s.step if start < 0 or (stop is not None and stop < 0) or step <= 0: raise ValueError - indices = count() if stop is None else range(max(stop, start)) + + indices = count() if stop is None else range(max(start, stop)) next_i = start for i, element in zip(indices, iterable): if i == next_i: @@ -540,18 +527,24 @@ loops that truncate the stream. .. function:: permutations(iterable, r=None) - Return successive *r* length permutations of elements in the *iterable*. + Return successive *r* length `permutations of elements + `_ from the *iterable*. If *r* is not specified or is ``None``, then *r* defaults to the length of the *iterable* and all possible full-length permutations are generated. + The output is a subsequence of :func:`product` where entries with + repeated elements have been filtered out. The length of the output is + given by :func:`math.perm` which computes ``n! / (n - r)!`` when + ``0 ≤ r ≤ n`` or zero when ``r > n``. + The permutation tuples are emitted in lexicographic order according to - the order of the input *iterable*. So, if the input *iterable* is sorted, + the order of the input *iterable*. If the input *iterable* is sorted, the output tuples will be produced in sorted order. Elements are treated as unique based on their position, not on their - value. So if the input elements are unique, there will be no repeated + value. If the input elements are unique, there will be no repeated values within a permutation. Roughly equivalent to:: @@ -559,14 +552,17 @@ loops that truncate the stream. def permutations(iterable, r=None): # permutations('ABCD', 2) → AB AC AD BA BC BD CA CB CD DA DB DC # permutations(range(3)) → 012 021 102 120 201 210 + pool = tuple(iterable) n = len(pool) r = n if r is None else r if r > n: return + indices = list(range(n)) cycles = list(range(n, n-r, -1)) yield tuple(pool[i] for i in indices[:r]) + while n: for i in reversed(range(r)): cycles[i] -= 1 @@ -581,20 +577,6 @@ loops that truncate the stream. else: return - The code for :func:`permutations` can be also expressed as a subsequence of - :func:`product`, filtered to exclude entries with repeated elements (those - from the same position in the input pool):: - - def permutations(iterable, r=None): - pool = tuple(iterable) - n = len(pool) - r = n if r is None else r - for indices in product(range(n), repeat=r): - if len(set(indices)) == r: - yield tuple(pool[i] for i in indices) - - The number of items returned is ``n! / (n-r)!`` when ``0 <= r <= n`` - or zero when ``r > n``. .. function:: product(*iterables, repeat=1) @@ -615,13 +597,16 @@ loops that truncate the stream. This function is roughly equivalent to the following code, except that the actual implementation does not build up intermediate results in memory:: - def product(*args, repeat=1): + def product(*iterables, repeat=1): # product('ABCD', 'xy') → Ax Ay Bx By Cx Cy Dx Dy # product(range(2), repeat=3) → 000 001 010 011 100 101 110 111 - pools = [tuple(pool) for pool in args] * repeat + + pools = [tuple(pool) for pool in iterables] * repeat + result = [[]] for pool in pools: result = [x+[y] for x in result for y in pool] + for prod in result: yield tuple(prod) @@ -629,6 +614,7 @@ loops that truncate the stream. keeping pools of values in memory to generate the products. Accordingly, it is only useful with finite inputs. 
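The counting claims introduced in the revised combinatorics entries above (:func:`math.comb` for :func:`combinations`, :func:`math.perm` for :func:`permutations`) are easy to sanity-check; the following is a minimal sketch, with an arbitrary pool and *r* chosen purely for illustration::

    import math
    from itertools import combinations, combinations_with_replacement, permutations, product

    pool = 'ABCD'                 # hypothetical example input, not from the patch
    n, r = len(pool), 2

    # Output lengths match the formulas quoted in the revised entries.
    assert len(list(combinations(pool, r))) == math.comb(n, r)          # 6
    assert len(list(permutations(pool, r))) == math.perm(n, r)          # 12
    assert len(list(product(pool, repeat=r))) == n ** r                 # 16

    # (n + r - 1)! / r! / (n - 1)! is the same quantity as comb(n + r - 1, r).
    assert len(list(combinations_with_replacement(pool, r))) == math.comb(n + r - 1, r)  # 10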
+ .. function:: repeat(object[, times]) Make an iterator that returns *object* over and over again. Runs indefinitely @@ -653,12 +639,12 @@ loops that truncate the stream. >>> list(map(pow, range(10), repeat(2))) [0, 1, 4, 9, 16, 25, 36, 49, 64, 81] + .. function:: starmap(function, iterable) - Make an iterator that computes the function using arguments obtained from - the iterable. Used instead of :func:`map` when argument parameters are already - grouped in tuples from a single iterable (when the data has been - "pre-zipped"). + Make an iterator that computes the *function* using arguments obtained + from the *iterable*. Used instead of :func:`map` when argument + parameters have already been "pre-zipped" into tuples. The difference between :func:`map` and :func:`starmap` parallels the distinction between ``function(a,b)`` and ``function(*c)``. Roughly @@ -672,21 +658,20 @@ loops that truncate the stream. .. function:: takewhile(predicate, iterable) - Make an iterator that returns elements from the iterable as long as the - predicate is true. Roughly equivalent to:: + Make an iterator that returns elements from the *iterable* as long as + the *predicate* is true. Roughly equivalent to:: def takewhile(predicate, iterable): - # takewhile(lambda x: x<5, [1,4,6,4,1]) → 1 4 + # takewhile(lambda x: x<5, [1,4,6,3,8]) → 1 4 for x in iterable: - if predicate(x): - yield x - else: + if not predicate(x): break + yield x Note, the element that first fails the predicate condition is consumed from the input iterator and there is no way to access it. This could be an issue if an application wants to further consume the - input iterator after takewhile has been run to exhaustion. To work + input iterator after *takewhile* has been run to exhaustion. To work around this problem, consider using `more-iterools before_and_after() `_ instead. @@ -696,24 +681,23 @@ loops that truncate the stream. Return *n* independent iterators from a single iterable. - The following Python code helps explain what *tee* does (although the actual - implementation is more complex and uses only a single underlying - :abbr:`FIFO (first-in, first-out)` queue):: + Roughly equivalent to:: def tee(iterable, n=2): - it = iter(iterable) - deques = [collections.deque() for i in range(n)] - def gen(mydeque): + iterator = iter(iterable) + shared_link = [None, None] + return tuple(_tee(iterator, shared_link) for _ in range(n)) + + def _tee(iterator, link): + try: while True: - if not mydeque: # when the local deque is empty - try: - newval = next(it) # fetch a new value and - except StopIteration: - return - for d in deques: # load it to all the deques - d.append(newval) - yield mydeque.popleft() - return tuple(gen(d) for d in deques) + if link[1] is None: + link[0] = next(iterator) + link[1] = [None, None] + value, link = link + yield value + except StopIteration: + return Once a :func:`tee` has been created, the original *iterable* should not be used anywhere else; otherwise, the *iterable* could get advanced without @@ -731,21 +715,29 @@ loops that truncate the stream. .. function:: zip_longest(*iterables, fillvalue=None) - Make an iterator that aggregates elements from each of the iterables. If the - iterables are of uneven length, missing values are filled-in with *fillvalue*. - Iteration continues until the longest iterable is exhausted. Roughly equivalent to:: + Make an iterator that aggregates elements from each of the + *iterables*. + + If the iterables are of uneven length, missing values are filled-in + with *fillvalue*. 
If not specified, *fillvalue* defaults to ``None``. + + Iteration continues until the longest iterable is exhausted. + + Roughly equivalent to:: - def zip_longest(*args, fillvalue=None): + def zip_longest(*iterables, fillvalue=None): # zip_longest('ABCD', 'xy', fillvalue='-') → Ax By C- D- - iterators = [iter(it) for it in args] + + iterators = list(map(iter, iterables)) num_active = len(iterators) if not num_active: return + while True: values = [] - for i, it in enumerate(iterators): + for i, iterator in enumerate(iterators): try: - value = next(it) + value = next(iterator) except StopIteration: num_active -= 1 if not num_active: @@ -757,8 +749,7 @@ loops that truncate the stream. If one of the iterables is potentially infinite, then the :func:`zip_longest` function should be wrapped with something that limits the number of calls - (for example :func:`islice` or :func:`takewhile`). If not specified, - *fillvalue* defaults to ``None``. + (for example :func:`islice` or :func:`takewhile`). .. _itertools-recipes: @@ -800,6 +791,7 @@ and :term:`generators ` which incur interpreter overhead. .. testcode:: import collections + import contextlib import functools import math import operator @@ -865,7 +857,7 @@ and :term:`generators ` which incur interpreter overhead. return len(take(2, groupby(iterable, key))) <= 1 def unique_justseen(iterable, key=None): - "List unique elements, preserving order. Remember only the element just seen." + "Yield unique elements, preserving order. Remember only the element just seen." # unique_justseen('AAAABBBCCDAABBB') → A B C D A B # unique_justseen('ABBcCAD', str.casefold) → A B c A D if key is None: @@ -873,7 +865,7 @@ and :term:`generators ` which incur interpreter overhead. return map(next, map(operator.itemgetter(1), groupby(iterable, key))) def unique_everseen(iterable, key=None): - "List unique elements, preserving order. Remember all elements ever seen." + "Yield unique elements, preserving order. Remember all elements ever seen." # unique_everseen('AAAABBBCCDAABBB') → A B C D # unique_everseen('ABBcCAD', str.casefold) → A B c D seen = set() @@ -888,6 +880,11 @@ and :term:`generators ` which incur interpreter overhead. seen.add(k) yield element + def unique(iterable, key=None, reverse=False): + "Yield unique elements in sorted order. Supports unhashable inputs." + # unique([[1, 2], [3, 4], [1, 2]]) → [1, 2] [3, 4] + return unique_justseen(sorted(iterable, key=key, reverse=reverse), key=key) + def sliding_window(iterable, n): "Collect data into overlapping fixed-length chunks or blocks." # sliding_window('ABCDEFG', 4) → ABCD BCDE CDEF DEFG @@ -942,32 +939,26 @@ and :term:`generators ` which incur interpreter overhead. # iter_index('AABCADEAF', 'A') → 0 1 4 7 seq_index = getattr(iterable, 'index', None) if seq_index is None: - # Path for general iterables iterator = islice(iterable, start, stop) for i, element in enumerate(iterator, start): if element is value or element == value: yield i else: - # Path for sequences with an index() method stop = len(iterable) if stop is None else stop i = start - try: + with contextlib.suppress(ValueError): while True: yield (i := seq_index(value, i, stop)) i += 1 - except ValueError: - pass def iter_except(func, exception, first=None): "Convert a call-until-exception interface to an iterator interface." 
# iter_except(d.popitem, KeyError) → non-blocking dictionary iterator - try: + with contextlib.suppress(exception): if first is not None: yield first() while True: yield func() - except exception: - pass The following recipes have a more mathematical flavor: @@ -1619,6 +1610,13 @@ The following recipes have a more mathematical flavor: >>> ''.join(input_iterator) 'AAABBBCCDAABBB' + >>> list(unique([[1, 2], [3, 4], [1, 2]])) + [[1, 2], [3, 4]] + >>> list(unique('ABBcCAD', str.casefold)) + ['A', 'B', 'c', 'D'] + >>> list(unique('ABBcCAD', str.casefold, reverse=True)) + ['D', 'c', 'B', 'A'] + >>> d = dict(a=1, b=2, c=3) >>> it = iter_except(d.popitem, KeyError) >>> d['d'] = 4 diff --git a/Doc/library/json.rst b/Doc/library/json.rst index c82ff9dc325b4c..42cb1f850fe9c5 100644 --- a/Doc/library/json.rst +++ b/Doc/library/json.rst @@ -1,5 +1,5 @@ -:mod:`json` --- JSON encoder and decoder -======================================== +:mod:`!json` --- JSON encoder and decoder +========================================= .. module:: json :synopsis: Encode and decode the JSON format. diff --git a/Doc/library/keyword.rst b/Doc/library/keyword.rst index c3b4699cb05af6..ac57140f888024 100644 --- a/Doc/library/keyword.rst +++ b/Doc/library/keyword.rst @@ -1,5 +1,5 @@ -:mod:`keyword` --- Testing for Python keywords -============================================== +:mod:`!keyword` --- Testing for Python keywords +=============================================== .. module:: keyword :synopsis: Test whether a string is a keyword in Python. diff --git a/Doc/library/linecache.rst b/Doc/library/linecache.rst index dd9f4ee45ba82e..88c6079a05b7fa 100644 --- a/Doc/library/linecache.rst +++ b/Doc/library/linecache.rst @@ -1,5 +1,5 @@ -:mod:`linecache` --- Random access to text lines -================================================ +:mod:`!linecache` --- Random access to text lines +================================================= .. module:: linecache :synopsis: Provides random access to individual lines from text files. diff --git a/Doc/library/locale.rst b/Doc/library/locale.rst index 414979524e57b6..0a8cbd4f95f473 100644 --- a/Doc/library/locale.rst +++ b/Doc/library/locale.rst @@ -1,5 +1,5 @@ -:mod:`locale` --- Internationalization services -=============================================== +:mod:`!locale` --- Internationalization services +================================================ .. module:: locale :synopsis: Internationalization services. diff --git a/Doc/library/logging.config.rst b/Doc/library/logging.config.rst index 13850c91446da5..dfbf0b1cf2f9ff 100644 --- a/Doc/library/logging.config.rst +++ b/Doc/library/logging.config.rst @@ -1,5 +1,5 @@ -:mod:`logging.config` --- Logging configuration -=============================================== +:mod:`!logging.config` --- Logging configuration +================================================ .. module:: logging.config :synopsis: Configuration of the logging module. diff --git a/Doc/library/logging.handlers.rst b/Doc/library/logging.handlers.rst index 2fe9370333beaf..5a081f9e7add99 100644 --- a/Doc/library/logging.handlers.rst +++ b/Doc/library/logging.handlers.rst @@ -1,5 +1,5 @@ -:mod:`logging.handlers` --- Logging handlers -============================================ +:mod:`!logging.handlers` --- Logging handlers +============================================= .. module:: logging.handlers :synopsis: Handlers for the logging module. @@ -66,7 +66,7 @@ and :meth:`flush` methods). :param stream: The stream that the handler should use. 
- :return: the old stream, if the stream was changed, or *None* if it wasn't. + :return: the old stream, if the stream was changed, or ``None`` if it wasn't. .. versionadded:: 3.7 diff --git a/Doc/library/logging.rst b/Doc/library/logging.rst index a733b288ecb6d0..0624423c950797 100644 --- a/Doc/library/logging.rst +++ b/Doc/library/logging.rst @@ -1,5 +1,5 @@ -:mod:`logging` --- Logging facility for Python -============================================== +:mod:`!logging` --- Logging facility for Python +=============================================== .. module:: logging :synopsis: Flexible event logging system for applications. @@ -109,11 +109,11 @@ The ``name`` is potentially a period-separated hierarchical value, like Loggers that are further down in the hierarchical list are children of loggers higher up in the list. For example, given a logger with a name of ``foo``, loggers with names of ``foo.bar``, ``foo.bar.baz``, and ``foo.bam`` are all -descendants of ``foo``. The logger name hierarchy is analogous to the Python -package hierarchy, and identical to it if you organise your loggers on a -per-module basis using the recommended construction -``logging.getLogger(__name__)``. That's because in a module, ``__name__`` -is the module's name in the Python package namespace. +descendants of ``foo``. In addition, all loggers are descendants of the root +logger. The logger name hierarchy is analogous to the Python package hierarchy, +and identical to it if you organise your loggers on a per-module basis using +the recommended construction ``logging.getLogger(__name__)``. That's because +in a module, ``__name__`` is the module's name in the Python package namespace. .. class:: Logger @@ -1157,10 +1157,12 @@ functions. .. function:: getLogger(name=None) - Return a logger with the specified name or, if name is ``None``, return a - logger which is the root logger of the hierarchy. If specified, the name is - typically a dot-separated hierarchical name like *'a'*, *'a.b'* or *'a.b.c.d'*. - Choice of these names is entirely up to the developer who is using logging. + Return a logger with the specified name or, if name is ``None``, return the + root logger of the hierarchy. If specified, the name is typically a + dot-separated hierarchical name like *'a'*, *'a.b'* or *'a.b.c.d'*. Choice + of these names is entirely up to the developer who is using logging, though + it is recommended that ``__name__`` be used unless you have a specific + reason for not doing that, as mentioned in :ref:`logger`. All calls to this function with a given name return the same logger instance. This means that logger instances never need to be passed between different parts diff --git a/Doc/library/lzma.rst b/Doc/library/lzma.rst index 74bf2670f9def6..69f7cb8d48d7ae 100644 --- a/Doc/library/lzma.rst +++ b/Doc/library/lzma.rst @@ -1,5 +1,5 @@ -:mod:`lzma` --- Compression using the LZMA algorithm -==================================================== +:mod:`!lzma` --- Compression using the LZMA algorithm +===================================================== .. module:: lzma :synopsis: A Python wrapper for the liblzma compression library. 
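The reworded :mod:`logging` material a little further above (every logger descends from the root logger, and dotted names mirror the package hierarchy) can be illustrated with a short sketch; the names ``foo`` and ``foo.bar`` are made-up examples::

    import logging

    logging.basicConfig(level=logging.INFO, format='%(name)s: %(message)s')

    parent = logging.getLogger('foo')
    child = logging.getLogger('foo.bar')

    assert child.parent is parent                   # 'foo.bar' hangs under 'foo'
    assert parent.parent is logging.getLogger()     # and 'foo' descends from the root logger
    assert logging.getLogger('foo.bar') is child    # same name -> same logger instance

    child.info('handled by the root logger, as the hierarchy described above')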
diff --git a/Doc/library/mailbox.rst b/Doc/library/mailbox.rst index a613548c9e518e..40ea71cd342b47 100644 --- a/Doc/library/mailbox.rst +++ b/Doc/library/mailbox.rst @@ -1,5 +1,5 @@ -:mod:`mailbox` --- Manipulate mailboxes in various formats -========================================================== +:mod:`!mailbox` --- Manipulate mailboxes in various formats +=========================================================== .. module:: mailbox :synopsis: Manipulate mailboxes in various formats diff --git a/Doc/library/marshal.rst b/Doc/library/marshal.rst index c6a006b7b4028a..f9ba4d554b0c22 100644 --- a/Doc/library/marshal.rst +++ b/Doc/library/marshal.rst @@ -1,5 +1,5 @@ -:mod:`marshal` --- Internal Python object serialization -======================================================= +:mod:`!marshal` --- Internal Python object serialization +======================================================== .. module:: marshal :synopsis: Convert Python objects to streams of bytes and back (with different @@ -10,7 +10,7 @@ This module contains functions that can read and write Python values in a binary format. The format is specific to Python, but independent of machine architecture issues (e.g., you can write a Python value to a file on a PC, -transport the file to a Sun, and read it back there). Details of the format are +transport the file to a Mac, and read it back there). Details of the format are undocumented on purpose; it may change between Python versions (although it rarely does). [#]_ diff --git a/Doc/library/math.rst b/Doc/library/math.rst index ab9236cc7cfb81..316144992d6832 100644 --- a/Doc/library/math.rst +++ b/Doc/library/math.rst @@ -1,5 +1,5 @@ -:mod:`math` --- Mathematical functions -====================================== +:mod:`!math` --- Mathematical functions +======================================= .. module:: math :synopsis: Mathematical functions (sin() etc.). @@ -268,7 +268,7 @@ Number-theoretic and representation functions Evaluates to ``n! / (n - k)!`` when ``k <= n`` and evaluates to zero when ``k > n``. - If *k* is not specified or is None, then *k* defaults to *n* + If *k* is not specified or is ``None``, then *k* defaults to *n* and the function returns ``n!``. Raises :exc:`TypeError` if either of the arguments are not integers. diff --git a/Doc/library/mimetypes.rst b/Doc/library/mimetypes.rst index a24eab21d57343..91e8c30f8607b3 100644 --- a/Doc/library/mimetypes.rst +++ b/Doc/library/mimetypes.rst @@ -1,5 +1,5 @@ -:mod:`mimetypes` --- Map filenames to MIME types -================================================ +:mod:`!mimetypes` --- Map filenames to MIME types +================================================= .. module:: mimetypes :synopsis: Mapping of filename extensions to MIME types. diff --git a/Doc/library/mmap.rst b/Doc/library/mmap.rst index 758721433f77de..4e20c07331a220 100644 --- a/Doc/library/mmap.rst +++ b/Doc/library/mmap.rst @@ -1,5 +1,5 @@ -:mod:`mmap` --- Memory-mapped file support -========================================== +:mod:`!mmap` --- Memory-mapped file support +=========================================== .. module:: mmap :synopsis: Interface to memory-mapped files for Unix and Windows. 
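As a quick check of the :func:`math.perm` behaviour restated in the :mod:`math` hunk above (the concrete numbers are arbitrary illustration values)::

    import math

    assert math.perm(5, 2) == 20                 # 5! / (5 - 2)! = 20
    assert math.perm(5, 6) == 0                  # zero when k > n
    assert math.perm(5) == math.factorial(5)     # k defaults to n when omitted or None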
diff --git a/Doc/library/modulefinder.rst b/Doc/library/modulefinder.rst index 526f0ff868c2b7..823d853f1ed8eb 100644 --- a/Doc/library/modulefinder.rst +++ b/Doc/library/modulefinder.rst @@ -1,5 +1,5 @@ -:mod:`modulefinder` --- Find modules used by a script -===================================================== +:mod:`!modulefinder` --- Find modules used by a script +====================================================== .. module:: modulefinder :synopsis: Find modules used by a script. diff --git a/Doc/library/msvcrt.rst b/Doc/library/msvcrt.rst index 72da777cd1407e..327cc3602b1a77 100644 --- a/Doc/library/msvcrt.rst +++ b/Doc/library/msvcrt.rst @@ -1,5 +1,5 @@ -:mod:`msvcrt` --- Useful routines from the MS VC++ runtime -========================================================== +:mod:`!msvcrt` --- Useful routines from the MS VC++ runtime +=========================================================== .. module:: msvcrt :platform: Windows diff --git a/Doc/library/multiprocessing.rst b/Doc/library/multiprocessing.rst index afc148c78e97bd..49762491bae5f4 100644 --- a/Doc/library/multiprocessing.rst +++ b/Doc/library/multiprocessing.rst @@ -1,5 +1,5 @@ -:mod:`multiprocessing` --- Process-based parallelism -==================================================== +:mod:`!multiprocessing` --- Process-based parallelism +===================================================== .. module:: multiprocessing :synopsis: Process-based parallelism. @@ -2483,9 +2483,9 @@ multiple connections at the same time. generally be omitted since it can usually be inferred from the format of *address*. (See :ref:`multiprocessing-address-formats`) - If *authkey* is given and not None, it should be a byte string and will be + If *authkey* is given and not ``None``, it should be a byte string and will be used as the secret key for an HMAC-based authentication challenge. No - authentication is done if *authkey* is None. + authentication is done if *authkey* is ``None``. :exc:`~multiprocessing.AuthenticationError` is raised if authentication fails. See :ref:`multiprocessing-auth-keys`. @@ -2518,9 +2518,9 @@ multiple connections at the same time. to the :meth:`~socket.socket.listen` method of the socket once it has been bound. - If *authkey* is given and not None, it should be a byte string and will be + If *authkey* is given and not ``None``, it should be a byte string and will be used as the secret key for an HMAC-based authentication challenge. No - authentication is done if *authkey* is None. + authentication is done if *authkey* is ``None``. :exc:`~multiprocessing.AuthenticationError` is raised if authentication fails. See :ref:`multiprocessing-auth-keys`. diff --git a/Doc/library/multiprocessing.shared_memory.rst b/Doc/library/multiprocessing.shared_memory.rst index 933fd07d62418a..e8f04a6ac7b95d 100644 --- a/Doc/library/multiprocessing.shared_memory.rst +++ b/Doc/library/multiprocessing.shared_memory.rst @@ -1,5 +1,5 @@ -:mod:`multiprocessing.shared_memory` --- Shared memory for direct access across processes -========================================================================================= +:mod:`!multiprocessing.shared_memory` --- Shared memory for direct access across processes +========================================================================================== .. module:: multiprocessing.shared_memory :synopsis: Provides shared memory for direct access across processes. 
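The *authkey* handshake described in the :mod:`multiprocessing.connection` hunks above can be sketched as follows; the address, port, and key are made-up illustration values, and a mismatched key on either side would raise :exc:`~multiprocessing.AuthenticationError`::

    import threading
    from multiprocessing.connection import Client, Listener

    address = ('localhost', 6000)      # hypothetical endpoint for the example
    authkey = b'secret password'       # hypothetical shared secret

    ready = threading.Event()

    def serve():
        # Accept one connection, authenticated via the HMAC challenge keyed by *authkey*.
        with Listener(address, authkey=authkey) as listener:
            ready.set()
            with listener.accept() as conn:
                conn.send({'status': 'ok'})

    server = threading.Thread(target=serve)
    server.start()
    ready.wait()

    with Client(address, authkey=authkey) as conn:
        print(conn.recv())             # {'status': 'ok'}

    server.join()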
diff --git a/Doc/library/netrc.rst b/Doc/library/netrc.rst index c36e5cfecfc6a8..f6260383b2b057 100644 --- a/Doc/library/netrc.rst +++ b/Doc/library/netrc.rst @@ -1,6 +1,5 @@ - -:mod:`netrc` --- netrc file processing -====================================== +:mod:`!netrc` --- netrc file processing +======================================= .. module:: netrc :synopsis: Loading of .netrc files. diff --git a/Doc/library/numbers.rst b/Doc/library/numbers.rst index 306bdd94aaca13..d0ae79c7a3df76 100644 --- a/Doc/library/numbers.rst +++ b/Doc/library/numbers.rst @@ -1,5 +1,5 @@ -:mod:`numbers` --- Numeric abstract base classes -================================================ +:mod:`!numbers` --- Numeric abstract base classes +================================================= .. module:: numbers :synopsis: Numeric abstract base classes (Complex, Real, Integral, etc.). @@ -84,10 +84,10 @@ The numeric tower ``~``. -Notes for type implementors +Notes for type implementers --------------------------- -Implementors should be careful to make equal numbers equal and hash +Implementers should be careful to make equal numbers equal and hash them to the same values. This may be subtle if there are two different extensions of the real numbers. For example, :class:`fractions.Fraction` implements :func:`hash` as follows:: diff --git a/Doc/library/operator.rst b/Doc/library/operator.rst index 96f2c287875d41..a9a6026af406fe 100644 --- a/Doc/library/operator.rst +++ b/Doc/library/operator.rst @@ -1,5 +1,5 @@ -:mod:`operator` --- Standard operators as functions -=================================================== +:mod:`!operator` --- Standard operators as functions +==================================================== .. module:: operator :synopsis: Functions corresponding to the standard operators. diff --git a/Doc/library/optparse.rst b/Doc/library/optparse.rst index 015e83ed2ce5f7..3e96259f94d47b 100644 --- a/Doc/library/optparse.rst +++ b/Doc/library/optparse.rst @@ -1,5 +1,5 @@ -:mod:`optparse` --- Parser for command line options -=================================================== +:mod:`!optparse` --- Parser for command line options +==================================================== .. module:: optparse :synopsis: Command-line option parsing library. @@ -1739,7 +1739,7 @@ seen, but blow up if it comes after ``-b`` in the command-line. :: Callback example 3: check option order (generalized) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -If you want to re-use this callback for several similar options (set a flag, but +If you want to reuse this callback for several similar options (set a flag, but blow up if ``-b`` has already been seen), it needs a bit of work: the error message and the flag that it sets must be generalized. :: diff --git a/Doc/library/os.path.rst b/Doc/library/os.path.rst index 24d423eb5fbfed..b582321515db56 100644 --- a/Doc/library/os.path.rst +++ b/Doc/library/os.path.rst @@ -1,5 +1,5 @@ -:mod:`os.path` --- Common pathname manipulations -================================================ +:mod:`!os.path` --- Common pathname manipulations +================================================= .. module:: os.path :synopsis: Operations on pathnames. 
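The :mod:`numbers` guidance above (implementers must make equal numbers equal and hash them to the same value) holds across the standard numeric tower; a minimal check::

    from decimal import Decimal
    from fractions import Fraction

    values = [2, 2.0, Fraction(2, 1), Decimal('2'), complex(2, 0)]

    assert len(set(values)) == 1                  # all five compare equal
    assert len({hash(v) for v in values}) == 1    # and they share a single hash value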
diff --git a/Doc/library/os.rst b/Doc/library/os.rst index 6c92eed9c063f1..b93b06d4e72afc 100644 --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -1,5 +1,5 @@ -:mod:`os` --- Miscellaneous operating system interfaces -======================================================= +:mod:`!os` --- Miscellaneous operating system interfaces +======================================================== .. module:: os :synopsis: Miscellaneous operating system interfaces. @@ -923,7 +923,7 @@ as internal buffering of data. Copy *count* bytes from file descriptor *src*, starting from offset *offset_src*, to file descriptor *dst*, starting from offset *offset_dst*. - If *offset_src* is None, then *src* is read from the current position; + If *offset_src* is ``None``, then *src* is read from the current position; respectively for *offset_dst*. In Linux kernel older than 5.3, the files pointed to by *src* and *dst* @@ -1718,7 +1718,7 @@ or `the MSDN `_ on Windo Transfer *count* bytes from file descriptor *src*, starting from offset *offset_src*, to file descriptor *dst*, starting from offset *offset_dst*. At least one of the file descriptors must refer to a pipe. If *offset_src* - is None, then *src* is read from the current position; respectively for + is ``None``, then *src* is read from the current position; respectively for *offset_dst*. The offset associated to the file descriptor that refers to a pipe must be ``None``. The files pointed to by *src* and *dst* must reside in the same filesystem, otherwise an :exc:`OSError` is raised with diff --git a/Doc/library/pathlib.rst b/Doc/library/pathlib.rst index 2e18e41869376e..ab109dc6a47a32 100644 --- a/Doc/library/pathlib.rst +++ b/Doc/library/pathlib.rst @@ -1,6 +1,5 @@ - -:mod:`pathlib` --- Object-oriented filesystem paths -=================================================== +:mod:`!pathlib` --- Object-oriented filesystem paths +==================================================== .. module:: pathlib :synopsis: Object-oriented filesystem paths @@ -628,8 +627,8 @@ Pure paths provide the following methods and properties: raise ValueError(error_message.format(str(self), str(formatted))) ValueError: '/etc/passwd' is not in the subpath of '/usr' OR one path is relative and the other is absolute. - When *walk_up* is False (the default), the path must start with *other*. - When the argument is True, ``..`` entries may be added to form the + When *walk_up* is false (the default), the path must start with *other*. + When the argument is true, ``..`` entries may be added to form the relative path. In all other cases, such as the paths referencing different drives, :exc:`ValueError` is raised.:: @@ -808,9 +807,12 @@ bugs or failures in your application):: % (cls.__name__,)) UnsupportedOperation: cannot instantiate 'WindowsPath' on your system +Some concrete path methods can raise an :exc:`OSError` if a system call fails +(for example because the path doesn't exist). -File URIs -^^^^^^^^^ + +Parsing and generating URIs +^^^^^^^^^^^^^^^^^^^^^^^^^^^ Concrete path objects can be created from, and represented as, 'file' URIs conforming to :rfc:`8089`. @@ -870,12 +872,8 @@ conforming to :rfc:`8089`. it strictly impure. -Methods -^^^^^^^ - -Concrete paths provide the following methods in addition to pure paths -methods. Many of these methods can raise an :exc:`OSError` if a system -call fails (for example because the path doesn't exist). +Querying file type and status +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. 
versionchanged:: 3.8 @@ -887,29 +885,6 @@ call fails (for example because the path doesn't exist). unrepresentable at the OS level. -.. classmethod:: Path.cwd() - - Return a new path object representing the current directory (as returned - by :func:`os.getcwd`):: - - >>> Path.cwd() - PosixPath('/home/antoine/pathlib') - - -.. classmethod:: Path.home() - - Return a new path object representing the user's home directory (as - returned by :func:`os.path.expanduser` with ``~`` construct). If the home - directory can't be resolved, :exc:`RuntimeError` is raised. - - :: - - >>> Path.home() - PosixPath('/home/antoine') - - .. versionadded:: 3.5 - - .. method:: Path.stat(*, follow_symlinks=True) Return a :class:`os.stat_result` object containing information about this path, like :func:`os.stat`. @@ -929,25 +904,12 @@ call fails (for example because the path doesn't exist). .. versionchanged:: 3.10 The *follow_symlinks* parameter was added. -.. method:: Path.chmod(mode, *, follow_symlinks=True) - - Change the file mode and permissions, like :func:`os.chmod`. - - This method normally follows symlinks. Some Unix flavours support changing - permissions on the symlink itself; on these platforms you may add the - argument ``follow_symlinks=False``, or use :meth:`~Path.lchmod`. - :: +.. method:: Path.lstat() - >>> p = Path('setup.py') - >>> p.stat().st_mode - 33277 - >>> p.chmod(0o444) - >>> p.stat().st_mode - 33060 + Like :meth:`Path.stat` but, if the path points to a symbolic link, return + the symbolic link's information rather than its target's. - .. versionchanged:: 3.10 - The *follow_symlinks* parameter was added. .. method:: Path.exists(*, follow_symlinks=True) @@ -970,97 +932,17 @@ call fails (for example because the path doesn't exist). .. versionchanged:: 3.12 The *follow_symlinks* parameter was added. -.. method:: Path.expanduser() - - Return a new path with expanded ``~`` and ``~user`` constructs, - as returned by :meth:`os.path.expanduser`. If a home directory can't be - resolved, :exc:`RuntimeError` is raised. - - :: - - >>> p = PosixPath('~/films/Monty Python') - >>> p.expanduser() - PosixPath('/home/eric/films/Monty Python') - - .. versionadded:: 3.5 - - -.. method:: Path.glob(pattern, *, case_sensitive=None, recurse_symlinks=False) - - Glob the given relative *pattern* in the directory represented by this path, - yielding all matching files (of any kind):: - - >>> sorted(Path('.').glob('*.py')) - [PosixPath('pathlib.py'), PosixPath('setup.py'), PosixPath('test_pathlib.py')] - >>> sorted(Path('.').glob('*/*.py')) - [PosixPath('docs/conf.py')] - >>> sorted(Path('.').glob('**/*.py')) - [PosixPath('build/lib/pathlib.py'), - PosixPath('docs/conf.py'), - PosixPath('pathlib.py'), - PosixPath('setup.py'), - PosixPath('test_pathlib.py')] - - .. seealso:: - :ref:`pathlib-pattern-language` documentation. - - By default, or when the *case_sensitive* keyword-only argument is set to - ``None``, this method matches paths using platform-specific casing rules: - typically, case-sensitive on POSIX, and case-insensitive on Windows. - Set *case_sensitive* to ``True`` or ``False`` to override this behaviour. - - By default, or when the *recurse_symlinks* keyword-only argument is set to - ``False``, this method follows symlinks except when expanding "``**``" - wildcards. Set *recurse_symlinks* to ``True`` to always follow symlinks. - - .. audit-event:: pathlib.Path.glob self,pattern pathlib.Path.glob - - .. versionchanged:: 3.12 - The *case_sensitive* parameter was added. - - .. 
versionchanged:: 3.13 - The *recurse_symlinks* parameter was added. - - .. versionchanged:: 3.13 - The *pattern* parameter accepts a :term:`path-like object`. - - .. versionchanged:: 3.13 - Any :exc:`OSError` exceptions raised from scanning the filesystem are - suppressed. In previous versions, such exceptions are suppressed in many - cases, but not all. - - -.. method:: Path.rglob(pattern, *, case_sensitive=None, recurse_symlinks=False) - - Glob the given relative *pattern* recursively. This is like calling - :func:`Path.glob` with "``**/``" added in front of the *pattern*. - - .. seealso:: - :ref:`pathlib-pattern-language` and :meth:`Path.glob` documentation. - - .. audit-event:: pathlib.Path.rglob self,pattern pathlib.Path.rglob - - .. versionchanged:: 3.12 - The *case_sensitive* parameter was added. - - .. versionchanged:: 3.13 - The *recurse_symlinks* parameter was added. - - .. versionchanged:: 3.13 - The *pattern* parameter accepts a :term:`path-like object`. - -.. method:: Path.group(*, follow_symlinks=True) +.. method:: Path.is_file(*, follow_symlinks=True) - Return the name of the group owning the file. :exc:`KeyError` is raised - if the file's gid isn't found in the system database. + Return ``True`` if the path points to a regular file, ``False`` if it + points to another kind of file. - This method normally follows symlinks; to get the group of the symlink, add - the argument ``follow_symlinks=False``. + ``False`` is also returned if the path doesn't exist or is a broken symlink; + other errors (such as permission errors) are propagated. - .. versionchanged:: 3.13 - Raises :exc:`UnsupportedOperation` if the :mod:`grp` module is not - available. In previous versions, :exc:`NotImplementedError` was raised. + This method normally follows symlinks; to exclude symlinks, add the + argument ``follow_symlinks=False``. .. versionchanged:: 3.13 The *follow_symlinks* parameter was added. @@ -1081,19 +963,12 @@ call fails (for example because the path doesn't exist). The *follow_symlinks* parameter was added. -.. method:: Path.is_file(*, follow_symlinks=True) - - Return ``True`` if the path points to a regular file, ``False`` if it - points to another kind of file. - - ``False`` is also returned if the path doesn't exist or is a broken symlink; - other errors (such as permission errors) are propagated. +.. method:: Path.is_symlink() - This method normally follows symlinks; to exclude symlinks, add the - argument ``follow_symlinks=False``. + Return ``True`` if the path points to a symbolic link, ``False`` otherwise. - .. versionchanged:: 3.13 - The *follow_symlinks* parameter was added. + ``False`` is also returned if the path doesn't exist; other errors (such + as permission errors) are propagated. .. method:: Path.is_junction() @@ -1121,14 +996,6 @@ call fails (for example because the path doesn't exist). Windows support was added. -.. method:: Path.is_symlink() - - Return ``True`` if the path points to a symbolic link, ``False`` otherwise. - - ``False`` is also returned if the path doesn't exist; other errors (such - as permission errors) are propagated. - - .. method:: Path.is_socket() Return ``True`` if the path points to a Unix socket (or a symbolic link @@ -1165,87 +1032,263 @@ call fails (for example because the path doesn't exist). other errors (such as permission errors) are propagated. -.. method:: Path.iterdir() +.. 
method:: Path.samefile(other_path) - When the path points to a directory, yield path objects of the directory - contents:: + Return whether this path points to the same file as *other_path*, which + can be either a Path object, or a string. The semantics are similar + to :func:`os.path.samefile` and :func:`os.path.samestat`. - >>> p = Path('docs') - >>> for child in p.iterdir(): child - ... - PosixPath('docs/conf.py') - PosixPath('docs/_templates') - PosixPath('docs/make.bat') - PosixPath('docs/index.rst') - PosixPath('docs/_build') - PosixPath('docs/_static') - PosixPath('docs/Makefile') + An :exc:`OSError` can be raised if either file cannot be accessed for some + reason. - The children are yielded in arbitrary order, and the special entries - ``'.'`` and ``'..'`` are not included. If a file is removed from or added - to the directory after creating the iterator, whether a path object for - that file be included is unspecified. + :: -.. method:: Path.walk(top_down=True, on_error=None, follow_symlinks=False) + >>> p = Path('spam') + >>> q = Path('eggs') + >>> p.samefile(q) + False + >>> p.samefile('spam') + True - Generate the file names in a directory tree by walking the tree - either top-down or bottom-up. + .. versionadded:: 3.5 - For each directory in the directory tree rooted at *self* (including - *self* but excluding '.' and '..'), the method yields a 3-tuple of - ``(dirpath, dirnames, filenames)``. - *dirpath* is a :class:`Path` to the directory currently being walked, - *dirnames* is a list of strings for the names of subdirectories in *dirpath* - (excluding ``'.'`` and ``'..'``), and *filenames* is a list of strings for - the names of the non-directory files in *dirpath*. To get a full path - (which begins with *self*) to a file or directory in *dirpath*, do - ``dirpath / name``. Whether or not the lists are sorted is file - system-dependent. +Reading and writing files +^^^^^^^^^^^^^^^^^^^^^^^^^ - If the optional argument *top_down* is true (which is the default), the triple for a - directory is generated before the triples for any of its subdirectories - (directories are walked top-down). If *top_down* is false, the triple - for a directory is generated after the triples for all of its subdirectories - (directories are walked bottom-up). No matter the value of *top_down*, the - list of subdirectories is retrieved before the triples for the directory and - its subdirectories are walked. - When *top_down* is true, the caller can modify the *dirnames* list in-place - (for example, using :keyword:`del` or slice assignment), and :meth:`Path.walk` - will only recurse into the subdirectories whose names remain in *dirnames*. - This can be used to prune the search, or to impose a specific order of visiting, - or even to inform :meth:`Path.walk` about directories the caller creates or - renames before it resumes :meth:`Path.walk` again. Modifying *dirnames* when - *top_down* is false has no effect on the behavior of :meth:`Path.walk()` since the - directories in *dirnames* have already been generated by the time *dirnames* - is yielded to the caller. +.. method:: Path.open(mode='r', buffering=-1, encoding=None, errors=None, newline=None) - By default, errors from :func:`os.scandir` are ignored. If the optional - argument *on_error* is specified, it should be a callable; it will be - called with one argument, an :exc:`OSError` instance. The callable can handle the - error to continue the walk or re-raise it to stop the walk. 
Note that the - filename is available as the ``filename`` attribute of the exception object. + Open the file pointed to by the path, like the built-in :func:`open` + function does:: - By default, :meth:`Path.walk` does not follow symbolic links, and instead adds them - to the *filenames* list. Set *follow_symlinks* to true to resolve symlinks - and place them in *dirnames* and *filenames* as appropriate for their targets, and - consequently visit directories pointed to by symlinks (where supported). + >>> p = Path('setup.py') + >>> with p.open() as f: + ... f.readline() + ... + '#!/usr/bin/env python3\n' - .. note:: - Be aware that setting *follow_symlinks* to true can lead to infinite - recursion if a link points to a parent directory of itself. :meth:`Path.walk` - does not keep track of the directories it has already visited. +.. method:: Path.read_text(encoding=None, errors=None, newline=None) - .. note:: - :meth:`Path.walk` assumes the directories it walks are not modified during - execution. For example, if a directory from *dirnames* has been replaced - with a symlink and *follow_symlinks* is false, :meth:`Path.walk` will - still try to descend into it. To prevent such behavior, remove directories - from *dirnames* as appropriate. + Return the decoded contents of the pointed-to file as a string:: - .. note:: + >>> p = Path('my_text_file') + >>> p.write_text('Text file contents') + 18 + >>> p.read_text() + 'Text file contents' + + The file is opened and then closed. The optional parameters have the same + meaning as in :func:`open`. + + .. versionadded:: 3.5 + + .. versionchanged:: 3.13 + The *newline* parameter was added. + + +.. method:: Path.read_bytes() + + Return the binary contents of the pointed-to file as a bytes object:: + + >>> p = Path('my_binary_file') + >>> p.write_bytes(b'Binary file contents') + 20 + >>> p.read_bytes() + b'Binary file contents' + + .. versionadded:: 3.5 + + +.. method:: Path.write_text(data, encoding=None, errors=None, newline=None) + + Open the file pointed to in text mode, write *data* to it, and close the + file:: + + >>> p = Path('my_text_file') + >>> p.write_text('Text file contents') + 18 + >>> p.read_text() + 'Text file contents' + + An existing file of the same name is overwritten. The optional parameters + have the same meaning as in :func:`open`. + + .. versionadded:: 3.5 + + .. versionchanged:: 3.10 + The *newline* parameter was added. + + +.. method:: Path.write_bytes(data) + + Open the file pointed to in bytes mode, write *data* to it, and close the + file:: + + >>> p = Path('my_binary_file') + >>> p.write_bytes(b'Binary file contents') + 20 + >>> p.read_bytes() + b'Binary file contents' + + An existing file of the same name is overwritten. + + .. versionadded:: 3.5 + + +Reading directories +^^^^^^^^^^^^^^^^^^^ + +.. method:: Path.iterdir() + + When the path points to a directory, yield path objects of the directory + contents:: + + >>> p = Path('docs') + >>> for child in p.iterdir(): child + ... + PosixPath('docs/conf.py') + PosixPath('docs/_templates') + PosixPath('docs/make.bat') + PosixPath('docs/index.rst') + PosixPath('docs/_build') + PosixPath('docs/_static') + PosixPath('docs/Makefile') + + The children are yielded in arbitrary order, and the special entries + ``'.'`` and ``'..'`` are not included. If a file is removed from or added + to the directory after creating the iterator, it is unspecified whether + a path object for that file is included. 
+ + If the path is not a directory or otherwise inaccessible, :exc:`OSError` is + raised. + +.. method:: Path.glob(pattern, *, case_sensitive=None, recurse_symlinks=False) + + Glob the given relative *pattern* in the directory represented by this path, + yielding all matching files (of any kind):: + + >>> sorted(Path('.').glob('*.py')) + [PosixPath('pathlib.py'), PosixPath('setup.py'), PosixPath('test_pathlib.py')] + >>> sorted(Path('.').glob('*/*.py')) + [PosixPath('docs/conf.py')] + >>> sorted(Path('.').glob('**/*.py')) + [PosixPath('build/lib/pathlib.py'), + PosixPath('docs/conf.py'), + PosixPath('pathlib.py'), + PosixPath('setup.py'), + PosixPath('test_pathlib.py')] + + .. seealso:: + :ref:`pathlib-pattern-language` documentation. + + By default, or when the *case_sensitive* keyword-only argument is set to + ``None``, this method matches paths using platform-specific casing rules: + typically, case-sensitive on POSIX, and case-insensitive on Windows. + Set *case_sensitive* to ``True`` or ``False`` to override this behaviour. + + By default, or when the *recurse_symlinks* keyword-only argument is set to + ``False``, this method follows symlinks except when expanding "``**``" + wildcards. Set *recurse_symlinks* to ``True`` to always follow symlinks. + + .. audit-event:: pathlib.Path.glob self,pattern pathlib.Path.glob + + .. versionchanged:: 3.12 + The *case_sensitive* parameter was added. + + .. versionchanged:: 3.13 + The *recurse_symlinks* parameter was added. + + .. versionchanged:: 3.13 + The *pattern* parameter accepts a :term:`path-like object`. + + .. versionchanged:: 3.13 + Any :exc:`OSError` exceptions raised from scanning the filesystem are + suppressed. In previous versions, such exceptions are suppressed in many + cases, but not all. + + +.. method:: Path.rglob(pattern, *, case_sensitive=None, recurse_symlinks=False) + + Glob the given relative *pattern* recursively. This is like calling + :func:`Path.glob` with "``**/``" added in front of the *pattern*. + + .. seealso:: + :ref:`pathlib-pattern-language` and :meth:`Path.glob` documentation. + + .. audit-event:: pathlib.Path.rglob self,pattern pathlib.Path.rglob + + .. versionchanged:: 3.12 + The *case_sensitive* parameter was added. + + .. versionchanged:: 3.13 + The *recurse_symlinks* parameter was added. + + .. versionchanged:: 3.13 + The *pattern* parameter accepts a :term:`path-like object`. + + +.. method:: Path.walk(top_down=True, on_error=None, follow_symlinks=False) + + Generate the file names in a directory tree by walking the tree + either top-down or bottom-up. + + For each directory in the directory tree rooted at *self* (including + *self* but excluding '.' and '..'), the method yields a 3-tuple of + ``(dirpath, dirnames, filenames)``. + + *dirpath* is a :class:`Path` to the directory currently being walked, + *dirnames* is a list of strings for the names of subdirectories in *dirpath* + (excluding ``'.'`` and ``'..'``), and *filenames* is a list of strings for + the names of the non-directory files in *dirpath*. To get a full path + (which begins with *self*) to a file or directory in *dirpath*, do + ``dirpath / name``. Whether or not the lists are sorted is file + system-dependent. + + If the optional argument *top_down* is true (which is the default), the triple for a + directory is generated before the triples for any of its subdirectories + (directories are walked top-down). 
If *top_down* is false, the triple + for a directory is generated after the triples for all of its subdirectories + (directories are walked bottom-up). No matter the value of *top_down*, the + list of subdirectories is retrieved before the triples for the directory and + its subdirectories are walked. + + When *top_down* is true, the caller can modify the *dirnames* list in-place + (for example, using :keyword:`del` or slice assignment), and :meth:`Path.walk` + will only recurse into the subdirectories whose names remain in *dirnames*. + This can be used to prune the search, or to impose a specific order of visiting, + or even to inform :meth:`Path.walk` about directories the caller creates or + renames before it resumes :meth:`Path.walk` again. Modifying *dirnames* when + *top_down* is false has no effect on the behavior of :meth:`Path.walk()` since the + directories in *dirnames* have already been generated by the time *dirnames* + is yielded to the caller. + + By default, errors from :func:`os.scandir` are ignored. If the optional + argument *on_error* is specified, it should be a callable; it will be + called with one argument, an :exc:`OSError` instance. The callable can handle the + error to continue the walk or re-raise it to stop the walk. Note that the + filename is available as the ``filename`` attribute of the exception object. + + By default, :meth:`Path.walk` does not follow symbolic links, and instead adds them + to the *filenames* list. Set *follow_symlinks* to true to resolve symlinks + and place them in *dirnames* and *filenames* as appropriate for their targets, and + consequently visit directories pointed to by symlinks (where supported). + + .. note:: + + Be aware that setting *follow_symlinks* to true can lead to infinite + recursion if a link points to a parent directory of itself. :meth:`Path.walk` + does not keep track of the directories it has already visited. + + .. note:: + :meth:`Path.walk` assumes the directories it walks are not modified during + execution. For example, if a directory from *dirnames* has been replaced + with a symlink and *follow_symlinks* is false, :meth:`Path.walk` will + still try to descend into it. To prevent such behavior, remove directories + from *dirnames* as appropriate. + + .. note:: Unlike :func:`os.walk`, :meth:`Path.walk` lists symlinks to directories in *filenames* if *follow_symlinks* is false. @@ -1281,16 +1324,89 @@ call fails (for example because the path doesn't exist). .. versionadded:: 3.12 -.. method:: Path.lchmod(mode) - Like :meth:`Path.chmod` but, if the path points to a symbolic link, the - symbolic link's mode is changed rather than its target's. +Other methods +^^^^^^^^^^^^^ +.. classmethod:: Path.cwd() -.. method:: Path.lstat() + Return a new path object representing the current directory (as returned + by :func:`os.getcwd`):: - Like :meth:`Path.stat` but, if the path points to a symbolic link, return - the symbolic link's information rather than its target's. + >>> Path.cwd() + PosixPath('/home/antoine/pathlib') + + +.. classmethod:: Path.home() + + Return a new path object representing the user's home directory (as + returned by :func:`os.path.expanduser` with ``~`` construct). If the home + directory can't be resolved, :exc:`RuntimeError` is raised. + + :: + + >>> Path.home() + PosixPath('/home/antoine') + + .. versionadded:: 3.5 + + +.. method:: Path.chmod(mode, *, follow_symlinks=True) + + Change the file mode and permissions, like :func:`os.chmod`. + + This method normally follows symlinks. 
Some Unix flavours support changing + permissions on the symlink itself; on these platforms you may add the + argument ``follow_symlinks=False``, or use :meth:`~Path.lchmod`. + + :: + + >>> p = Path('setup.py') + >>> p.stat().st_mode + 33277 + >>> p.chmod(0o444) + >>> p.stat().st_mode + 33060 + + .. versionchanged:: 3.10 + The *follow_symlinks* parameter was added. + + +.. method:: Path.expanduser() + + Return a new path with expanded ``~`` and ``~user`` constructs, + as returned by :meth:`os.path.expanduser`. If a home directory can't be + resolved, :exc:`RuntimeError` is raised. + + :: + + >>> p = PosixPath('~/films/Monty Python') + >>> p.expanduser() + PosixPath('/home/eric/films/Monty Python') + + .. versionadded:: 3.5 + + +.. method:: Path.group(*, follow_symlinks=True) + + Return the name of the group owning the file. :exc:`KeyError` is raised + if the file's gid isn't found in the system database. + + This method normally follows symlinks; to get the group of the symlink, add + the argument ``follow_symlinks=False``. + + .. versionchanged:: 3.13 + Raises :exc:`UnsupportedOperation` if the :mod:`grp` module is not + available. In previous versions, :exc:`NotImplementedError` was raised. + + .. versionchanged:: 3.13 + The *follow_symlinks* parameter was added. + + +.. method:: Path.lchmod(mode) + + Like :meth:`Path.chmod` but, if the path points to a symbolic link, the + symbolic link's mode is changed rather than its target's. .. method:: Path.mkdir(mode=0o777, parents=False, exist_ok=False) @@ -1318,18 +1434,6 @@ call fails (for example because the path doesn't exist). The *exist_ok* parameter was added. -.. method:: Path.open(mode='r', buffering=-1, encoding=None, errors=None, newline=None) - - Open the file pointed to by the path, like the built-in :func:`open` - function does:: - - >>> p = Path('setup.py') - >>> with p.open() as f: - ... f.readline() - ... - '#!/usr/bin/env python3\n' - - .. method:: Path.owner(*, follow_symlinks=True) Return the name of the user owning the file. :exc:`KeyError` is raised @@ -1346,37 +1450,6 @@ call fails (for example because the path doesn't exist). The *follow_symlinks* parameter was added. -.. method:: Path.read_bytes() - - Return the binary contents of the pointed-to file as a bytes object:: - - >>> p = Path('my_binary_file') - >>> p.write_bytes(b'Binary file contents') - 20 - >>> p.read_bytes() - b'Binary file contents' - - .. versionadded:: 3.5 - - -.. method:: Path.read_text(encoding=None, errors=None, newline=None) - - Return the decoded contents of the pointed-to file as a string:: - - >>> p = Path('my_text_file') - >>> p.write_text('Text file contents') - 18 - >>> p.read_text() - 'Text file contents' - - The file is opened and then closed. The optional parameters have the same - meaning as in :func:`open`. - - .. versionadded:: 3.5 - - .. versionchanged:: 3.13 - The *newline* parameter was added. - .. method:: Path.readlink() Return the path to which the symbolic link points (as returned by @@ -1482,27 +1555,6 @@ call fails (for example because the path doesn't exist). Remove this directory. The directory must be empty. -.. method:: Path.samefile(other_path) - - Return whether this path points to the same file as *other_path*, which - can be either a Path object, or a string. The semantics are similar - to :func:`os.path.samefile` and :func:`os.path.samestat`. - - An :exc:`OSError` can be raised if either file cannot be accessed for some - reason. 
- - :: - - >>> p = Path('spam') - >>> q = Path('eggs') - >>> p.samefile(q) - False - >>> p.samefile('spam') - True - - .. versionadded:: 3.5 - - .. method:: Path.symlink_to(target, target_is_directory=False) Make this path a symbolic link pointing to *target*. @@ -1572,42 +1624,6 @@ call fails (for example because the path doesn't exist). The *missing_ok* parameter was added. -.. method:: Path.write_bytes(data) - - Open the file pointed to in bytes mode, write *data* to it, and close the - file:: - - >>> p = Path('my_binary_file') - >>> p.write_bytes(b'Binary file contents') - 20 - >>> p.read_bytes() - b'Binary file contents' - - An existing file of the same name is overwritten. - - .. versionadded:: 3.5 - - -.. method:: Path.write_text(data, encoding=None, errors=None, newline=None) - - Open the file pointed to in text mode, write *data* to it, and close the - file:: - - >>> p = Path('my_text_file') - >>> p.write_text('Text file contents') - 18 - >>> p.read_text() - 'Text file contents' - - An existing file of the same name is overwritten. The optional parameters - have the same meaning as in :func:`open`. - - .. versionadded:: 3.5 - - .. versionchanged:: 3.10 - The *newline* parameter was added. - - .. _pathlib-pattern-language: Pattern language diff --git a/Doc/library/pdb.rst b/Doc/library/pdb.rst index 7a47a7d5d6754f..cd6496203949ea 100644 --- a/Doc/library/pdb.rst +++ b/Doc/library/pdb.rst @@ -123,6 +123,11 @@ The typical usage to inspect a crashed program is:: 0 (Pdb) +.. versionchanged:: 3.13 + The implementation of :pep:`667` means that name assignments made via ``pdb`` + will immediately affect the active scope, even when running inside an + :term:`optimized scope`. + The module defines the following functions; each enters the debugger in a slightly different way: @@ -285,6 +290,8 @@ There are three preset *convenience variables*: .. versionadded:: 3.12 + Added the *convenience variable* feature. + .. index:: pair: .pdbrc; file triple: debugger; configuration; file @@ -579,18 +586,17 @@ can be overridden by the local file. .. pdbcommand:: interact - Start an interactive interpreter (using the :mod:`code` module) whose global - namespace contains all the (global and local) names found in the current - scope. Use ``exit()`` or ``quit()`` to exit the interpreter and return to - the debugger. + Start an interactive interpreter (using the :mod:`code` module) in a new + global namespace initialised from the local and global namespaces for the + current scope. Use ``exit()`` or ``quit()`` to exit the interpreter and + return to the debugger. .. note:: - Because interact creates a new global namespace with the current global - and local namespace for execution, assignment to variables will not - affect the original namespaces. - However, modification to the mutable objects will be reflected in the - original namespaces. + As ``interact`` creates a new dedicated namespace for code execution, + assignments to variables will not affect the original namespaces. + However, modifications to any referenced mutable objects will be reflected + in the original namespaces as usual. .. versionadded:: 3.2 diff --git a/Doc/library/pickle.rst b/Doc/library/pickle.rst index 223c27237e4d34..57fbe5b6ece6b6 100644 --- a/Doc/library/pickle.rst +++ b/Doc/library/pickle.rst @@ -1,5 +1,5 @@ -:mod:`pickle` --- Python object serialization -============================================= +:mod:`!pickle` --- Python object serialization +============================================== .. 
module:: pickle :synopsis: Convert Python objects to streams of bytes and back. @@ -314,16 +314,16 @@ The :mod:`pickle` module exports three classes, :class:`Pickler`, map the new Python 3 names to the old module names used in Python 2, so that the pickle data stream is readable with Python 2. - If *buffer_callback* is None (the default), buffer views are + If *buffer_callback* is ``None`` (the default), buffer views are serialized into *file* as part of the pickle stream. - If *buffer_callback* is not None, then it can be called any number + If *buffer_callback* is not ``None``, then it can be called any number of times with a buffer view. If the callback returns a false value - (such as None), the given buffer is :ref:`out-of-band `; + (such as ``None``), the given buffer is :ref:`out-of-band `; otherwise the buffer is serialized in-band, i.e. inside the pickle stream. - It is an error if *buffer_callback* is not None and *protocol* is - None or smaller than 5. + It is an error if *buffer_callback* is not ``None`` and *protocol* is + ``None`` or smaller than 5. .. versionchanged:: 3.8 The *buffer_callback* argument was added. @@ -420,12 +420,12 @@ The :mod:`pickle` module exports three classes, :class:`Pickler`, instances of :class:`~datetime.datetime`, :class:`~datetime.date` and :class:`~datetime.time` pickled by Python 2. - If *buffers* is None (the default), then all data necessary for + If *buffers* is ``None`` (the default), then all data necessary for deserialization must be contained in the pickle stream. This means - that the *buffer_callback* argument was None when a :class:`Pickler` + that the *buffer_callback* argument was ``None`` when a :class:`Pickler` was instantiated (or when :func:`dump` or :func:`dumps` was called). - If *buffers* is not None, it should be an iterable of buffer-enabled + If *buffers* is not ``None``, it should be an iterable of buffer-enabled objects that is consumed each time the pickle stream references an :ref:`out-of-band ` buffer view. Such buffers have been given in order to the *buffer_callback* of a Pickler object. diff --git a/Doc/library/pickletools.rst b/Doc/library/pickletools.rst index 9739207a224431..e072605974f6c2 100644 --- a/Doc/library/pickletools.rst +++ b/Doc/library/pickletools.rst @@ -1,5 +1,5 @@ -:mod:`pickletools` --- Tools for pickle developers -================================================== +:mod:`!pickletools` --- Tools for pickle developers +=================================================== .. module:: pickletools :synopsis: Contains extensive comments about the pickle protocols and diff --git a/Doc/library/pkgutil.rst b/Doc/library/pkgutil.rst index 891a867d1ceb68..5d4ff34ba029a0 100644 --- a/Doc/library/pkgutil.rst +++ b/Doc/library/pkgutil.rst @@ -1,5 +1,5 @@ -:mod:`pkgutil` --- Package extension utility -============================================ +:mod:`!pkgutil` --- Package extension utility +============================================= .. module:: pkgutil :synopsis: Utilities for the import system. diff --git a/Doc/library/platform.rst b/Doc/library/platform.rst index 66af37e3073852..f082393ef9363c 100644 --- a/Doc/library/platform.rst +++ b/Doc/library/platform.rst @@ -1,5 +1,5 @@ -:mod:`platform` --- Access to underlying platform's identifying data -===================================================================== +:mod:`!platform` --- Access to underlying platform's identifying data +====================================================================== .. 
module:: platform :synopsis: Retrieves as much platform identifying data as possible. diff --git a/Doc/library/plistlib.rst b/Doc/library/plistlib.rst index 7416ca2650bab4..78b3c2697bd696 100644 --- a/Doc/library/plistlib.rst +++ b/Doc/library/plistlib.rst @@ -1,5 +1,5 @@ -:mod:`plistlib` --- Generate and parse Apple ``.plist`` files -============================================================= +:mod:`!plistlib` --- Generate and parse Apple ``.plist`` files +============================================================== .. module:: plistlib :synopsis: Generate and parse Apple plist files. diff --git a/Doc/library/poplib.rst b/Doc/library/poplib.rst index 943eb21f6eec02..23f20b00e6dc6d 100644 --- a/Doc/library/poplib.rst +++ b/Doc/library/poplib.rst @@ -1,5 +1,5 @@ -:mod:`poplib` --- POP3 protocol client -====================================== +:mod:`!poplib` --- POP3 protocol client +======================================= .. module:: poplib :synopsis: POP3 protocol client (requires sockets). diff --git a/Doc/library/posix.rst b/Doc/library/posix.rst index 5871574b442667..14ab3e91e8a8e4 100644 --- a/Doc/library/posix.rst +++ b/Doc/library/posix.rst @@ -1,5 +1,5 @@ -:mod:`posix` --- The most common POSIX system calls -=================================================== +:mod:`!posix` --- The most common POSIX system calls +==================================================== .. module:: posix :platform: Unix diff --git a/Doc/library/pprint.rst b/Doc/library/pprint.rst index 6dfea25d755f75..df706c10ce9ec4 100644 --- a/Doc/library/pprint.rst +++ b/Doc/library/pprint.rst @@ -1,5 +1,5 @@ -:mod:`pprint` --- Data pretty printer -===================================== +:mod:`!pprint` --- Data pretty printer +====================================== .. module:: pprint :synopsis: Data pretty printer. diff --git a/Doc/library/profile.rst b/Doc/library/profile.rst index 3ca802e024bc27..9721da7220d54d 100644 --- a/Doc/library/profile.rst +++ b/Doc/library/profile.rst @@ -234,7 +234,7 @@ functions: .. function:: runctx(command, globals, locals, filename=None, sort=-1) This function is similar to :func:`run`, with added arguments to supply the - globals and locals dictionaries for the *command* string. This routine + globals and locals mappings for the *command* string. This routine executes:: exec(command, globals, locals) diff --git a/Doc/library/pty.rst b/Doc/library/pty.rst index bd2f5ed45cb8b4..1a44bb13a841de 100644 --- a/Doc/library/pty.rst +++ b/Doc/library/pty.rst @@ -1,5 +1,5 @@ -:mod:`pty` --- Pseudo-terminal utilities -======================================== +:mod:`!pty` --- Pseudo-terminal utilities +========================================= .. module:: pty :platform: Unix diff --git a/Doc/library/pwd.rst b/Doc/library/pwd.rst index a6c6d79b60b20a..e1ff32912132f7 100644 --- a/Doc/library/pwd.rst +++ b/Doc/library/pwd.rst @@ -1,5 +1,5 @@ -:mod:`pwd` --- The password database -==================================== +:mod:`!pwd` --- The password database +===================================== .. module:: pwd :platform: Unix diff --git a/Doc/library/py_compile.rst b/Doc/library/py_compile.rst index a35fa0ba3f7bde..75aa739d1003b8 100644 --- a/Doc/library/py_compile.rst +++ b/Doc/library/py_compile.rst @@ -1,5 +1,5 @@ -:mod:`py_compile` --- Compile Python source files -================================================= +:mod:`!py_compile` --- Compile Python source files +================================================== .. 
module:: py_compile :synopsis: Generate byte-code files from Python source files. diff --git a/Doc/library/pyclbr.rst b/Doc/library/pyclbr.rst index 1e9876849b02f3..5efb11d89dd143 100644 --- a/Doc/library/pyclbr.rst +++ b/Doc/library/pyclbr.rst @@ -1,5 +1,5 @@ -:mod:`pyclbr` --- Python module browser support -=============================================== +:mod:`!pyclbr` --- Python module browser support +================================================ .. module:: pyclbr :synopsis: Supports information extraction for a Python module browser. @@ -142,7 +142,7 @@ Class Objects .. attribute:: parent - For top-level classes, None. For nested classes, the parent. + For top-level classes, ``None``. For nested classes, the parent. .. versionadded:: 3.7 diff --git a/Doc/library/pydoc.rst b/Doc/library/pydoc.rst index df969b2fc7c04c..f7ca1e045699eb 100644 --- a/Doc/library/pydoc.rst +++ b/Doc/library/pydoc.rst @@ -1,5 +1,5 @@ -:mod:`pydoc` --- Documentation generator and online help system -=============================================================== +:mod:`!pydoc` --- Documentation generator and online help system +================================================================ .. module:: pydoc :synopsis: Documentation generator and online help system. diff --git a/Doc/library/pyexpat.rst b/Doc/library/pyexpat.rst index c4b4e6319277af..c0e9999f4b1270 100644 --- a/Doc/library/pyexpat.rst +++ b/Doc/library/pyexpat.rst @@ -1,5 +1,5 @@ -:mod:`xml.parsers.expat` --- Fast XML parsing using Expat -========================================================= +:mod:`!xml.parsers.expat` --- Fast XML parsing using Expat +========================================================== .. module:: xml.parsers.expat :synopsis: An interface to the Expat non-validating XML parser. @@ -210,7 +210,7 @@ XMLParser Objects by default until a sufficient amount of input is reached. Due to this delay, registered handlers may — depending of the sizing of input chunks pushed to Expat — no longer be called right after pushing new - input to the parser. Where immediate feedback and taking over responsiblity + input to the parser. Where immediate feedback and taking over responsibility of protecting against denial of service from large tokens are both wanted, calling ``SetReparseDeferralEnabled(False)`` disables reparse deferral for the current Expat parser instance, temporarily or altogether. diff --git a/Doc/library/queue.rst b/Doc/library/queue.rst index fce23313c7de28..fbbebcf4ed8f92 100644 --- a/Doc/library/queue.rst +++ b/Doc/library/queue.rst @@ -1,5 +1,5 @@ -:mod:`queue` --- A synchronized queue class -=========================================== +:mod:`!queue` --- A synchronized queue class +============================================ .. module:: queue :synopsis: A synchronized queue class. diff --git a/Doc/library/quopri.rst b/Doc/library/quopri.rst index 86717c00c3c136..977cb08d836afe 100644 --- a/Doc/library/quopri.rst +++ b/Doc/library/quopri.rst @@ -1,5 +1,5 @@ -:mod:`quopri` --- Encode and decode MIME quoted-printable data -============================================================== +:mod:`!quopri` --- Encode and decode MIME quoted-printable data +=============================================================== .. module:: quopri :synopsis: Encode and decode files using the MIME quoted-printable encoding. 
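The :mod:`pickle` hunk above normalises the ``None`` markup around *buffer_callback* and *buffers*; as a minimal sketch of how those two arguments pair up for protocol-5 out-of-band data (the ``blob`` payload and ``buffers`` list are illustrative, not taken from the patch)::

    import pickle

    blob = bytearray(b"large binary payload")

    # buffer_callback receives each PickleBuffer; list.append() returns None,
    # a false value, so the buffer stays out-of-band.
    buffers = []
    data = pickle.dumps(pickle.PickleBuffer(blob), protocol=5,
                        buffer_callback=buffers.append)

    # The same buffers must be handed back, in order, via *buffers*.
    restored = pickle.loads(data, buffers=buffers)
    assert bytes(restored) == bytes(blob)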
diff --git a/Doc/library/random.rst b/Doc/library/random.rst index 22a799c706b7e2..755d1c8908c966 100644 --- a/Doc/library/random.rst +++ b/Doc/library/random.rst @@ -1,5 +1,5 @@ -:mod:`random` --- Generate pseudo-random numbers -================================================ +:mod:`!random` --- Generate pseudo-random numbers +================================================= .. module:: random :synopsis: Generate pseudo-random numbers with various common distributions. diff --git a/Doc/library/re.rst b/Doc/library/re.rst index fe7da856076819..39788de76b558b 100644 --- a/Doc/library/re.rst +++ b/Doc/library/re.rst @@ -1,5 +1,5 @@ -:mod:`re` --- Regular expression operations -=========================================== +:mod:`!re` --- Regular expression operations +============================================ .. module:: re :synopsis: Regular expression operations. diff --git a/Doc/library/readline.rst b/Doc/library/readline.rst index 8f8718ec51c41b..5658b93c81dc99 100644 --- a/Doc/library/readline.rst +++ b/Doc/library/readline.rst @@ -1,5 +1,5 @@ -:mod:`readline` --- GNU readline interface -========================================== +:mod:`!readline` --- GNU readline interface +=========================================== .. module:: readline :platform: Unix diff --git a/Doc/library/reprlib.rst b/Doc/library/reprlib.rst index 678a11c6f45490..28c7855dfeeef3 100644 --- a/Doc/library/reprlib.rst +++ b/Doc/library/reprlib.rst @@ -1,5 +1,5 @@ -:mod:`reprlib` --- Alternate :func:`repr` implementation -======================================================== +:mod:`!reprlib` --- Alternate :func:`repr` implementation +========================================================= .. module:: reprlib :synopsis: Alternate repr() implementation with size limits. diff --git a/Doc/library/resource.rst b/Doc/library/resource.rst index 4fea8d5cb718c1..dd80b1e6670d92 100644 --- a/Doc/library/resource.rst +++ b/Doc/library/resource.rst @@ -1,5 +1,5 @@ -:mod:`resource` --- Resource usage information -============================================== +:mod:`!resource` --- Resource usage information +=============================================== .. module:: resource :platform: Unix diff --git a/Doc/library/rlcompleter.rst b/Doc/library/rlcompleter.rst index 8287699c5f013e..91779feb525013 100644 --- a/Doc/library/rlcompleter.rst +++ b/Doc/library/rlcompleter.rst @@ -1,5 +1,5 @@ -:mod:`rlcompleter` --- Completion function for GNU readline -=========================================================== +:mod:`!rlcompleter` --- Completion function for GNU readline +============================================================ .. module:: rlcompleter :synopsis: Python identifier completion, suitable for the GNU readline library. diff --git a/Doc/library/runpy.rst b/Doc/library/runpy.rst index f2cb595f495f6b..b07ec6e93f80ab 100644 --- a/Doc/library/runpy.rst +++ b/Doc/library/runpy.rst @@ -1,5 +1,5 @@ -:mod:`runpy` --- Locating and executing Python modules -====================================================== +:mod:`!runpy` --- Locating and executing Python modules +======================================================= .. module:: runpy :synopsis: Locate and run Python modules without importing them first. diff --git a/Doc/library/sched.rst b/Doc/library/sched.rst index 4c980dd97f9394..517dbe8c321898 100644 --- a/Doc/library/sched.rst +++ b/Doc/library/sched.rst @@ -1,5 +1,5 @@ -:mod:`sched` --- Event scheduler -================================ +:mod:`!sched` --- Event scheduler +================================= .. 
module:: sched :synopsis: General purpose event scheduler. diff --git a/Doc/library/secrets.rst b/Doc/library/secrets.rst index 8f1a68d1d8816c..1401a925103517 100644 --- a/Doc/library/secrets.rst +++ b/Doc/library/secrets.rst @@ -1,5 +1,5 @@ -:mod:`secrets` --- Generate secure random numbers for managing secrets -====================================================================== +:mod:`!secrets` --- Generate secure random numbers for managing secrets +======================================================================= .. module:: secrets :synopsis: Generate secure random numbers for managing secrets. @@ -42,13 +42,13 @@ randomness that your operating system provides. sources provided by the operating system. See :class:`random.SystemRandom` for additional details. -.. function:: choice(sequence) +.. function:: choice(seq) Return a randomly chosen element from a non-empty sequence. -.. function:: randbelow(n) +.. function:: randbelow(exclusive_upper_bound) - Return a random int in the range [0, *n*). + Return a random int in the range [0, *exclusive_upper_bound*). .. function:: randbits(k) diff --git a/Doc/library/select.rst b/Doc/library/select.rst index a0058046d0ce4c..06ebaf0201e0e7 100644 --- a/Doc/library/select.rst +++ b/Doc/library/select.rst @@ -1,5 +1,5 @@ -:mod:`select` --- Waiting for I/O completion -============================================ +:mod:`!select` --- Waiting for I/O completion +============================================= .. module:: select :synopsis: Wait for I/O completion on multiple streams. diff --git a/Doc/library/selectors.rst b/Doc/library/selectors.rst index 76cbf91412f763..de8c3ef0ea2275 100644 --- a/Doc/library/selectors.rst +++ b/Doc/library/selectors.rst @@ -1,5 +1,5 @@ -:mod:`selectors` --- High-level I/O multiplexing -================================================ +:mod:`!selectors` --- High-level I/O multiplexing +================================================= .. module:: selectors :synopsis: High-level I/O multiplexing. diff --git a/Doc/library/shelve.rst b/Doc/library/shelve.rst index 1fa614f6584170..6e74a59b82b8ec 100644 --- a/Doc/library/shelve.rst +++ b/Doc/library/shelve.rst @@ -1,5 +1,5 @@ -:mod:`shelve` --- Python object persistence -=========================================== +:mod:`!shelve` --- Python object persistence +============================================ .. module:: shelve :synopsis: Python object persistence. diff --git a/Doc/library/shlex.rst b/Doc/library/shlex.rst index 716420f5e74ffa..a96f0864dc1260 100644 --- a/Doc/library/shlex.rst +++ b/Doc/library/shlex.rst @@ -1,5 +1,5 @@ -:mod:`shlex` --- Simple lexical analysis -======================================== +:mod:`!shlex` --- Simple lexical analysis +========================================= .. module:: shlex :synopsis: Simple lexical analysis for Unix shell-like languages. diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst index 331acfce3afee3..fd32479195eca8 100644 --- a/Doc/library/shutil.rst +++ b/Doc/library/shutil.rst @@ -1,5 +1,5 @@ -:mod:`shutil` --- High-level file operations -============================================ +:mod:`!shutil` --- High-level file operations +============================================= .. module:: shutil :synopsis: High-level file operations, including copying. @@ -338,7 +338,7 @@ Directory and files operations before removing the junction. .. versionchanged:: 3.11 - The *dir_fd* parameter. + Added the *dir_fd* parameter. .. versionchanged:: 3.12 Added the *onexc* parameter, deprecated *onerror*. 
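The :mod:`secrets` hunk above only renames the documented parameters to *seq* and *exclusive_upper_bound*; typical positional use is unchanged, for example (the alphabet string is illustrative)::

    import secrets

    alphabet = "abcdefghijklmnopqrstuvwxyz0123456789"
    token = "".join(secrets.choice(alphabet) for _ in range(16))  # 16 random characters
    roll = secrets.randbelow(6) + 1  # randbelow(6) returns an int in [0, 6)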
diff --git a/Doc/library/signal.rst b/Doc/library/signal.rst index 05ef45c123b02e..48c6841c648ca4 100644 --- a/Doc/library/signal.rst +++ b/Doc/library/signal.rst @@ -1,5 +1,5 @@ -:mod:`signal` --- Set handlers for asynchronous events -====================================================== +:mod:`!signal` --- Set handlers for asynchronous events +======================================================= .. module:: signal :synopsis: Set handlers for asynchronous events. diff --git a/Doc/library/site.rst b/Doc/library/site.rst index e52bbd32d4d493..1c420419568a90 100644 --- a/Doc/library/site.rst +++ b/Doc/library/site.rst @@ -1,5 +1,5 @@ -:mod:`site` --- Site-specific configuration hook -================================================ +:mod:`!site` --- Site-specific configuration hook +================================================= .. module:: site :synopsis: Module responsible for site-specific configuration. diff --git a/Doc/library/smtplib.rst b/Doc/library/smtplib.rst index aaec2aa1ef1dbe..2511ef7f2ada41 100644 --- a/Doc/library/smtplib.rst +++ b/Doc/library/smtplib.rst @@ -1,5 +1,5 @@ -:mod:`smtplib` --- SMTP protocol client -======================================= +:mod:`!smtplib` --- SMTP protocol client +======================================== .. module:: smtplib :synopsis: SMTP protocol client (requires sockets). diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst index 76af783c6292f9..2df0257d1f24f0 100644 --- a/Doc/library/socket.rst +++ b/Doc/library/socket.rst @@ -1,5 +1,5 @@ -:mod:`socket` --- Low-level networking interface -================================================ +:mod:`!socket` --- Low-level networking interface +================================================= .. module:: socket :synopsis: Low-level networking interface. @@ -1589,7 +1589,8 @@ to sockets. Return a :term:`file object` associated with the socket. The exact returned type depends on the arguments given to :meth:`makefile`. These arguments are interpreted the same way as by the built-in :func:`open` function, except - the only supported *mode* values are ``'r'`` (default), ``'w'`` and ``'b'``. + the only supported *mode* values are ``'r'`` (default), ``'w'``, ``'b'``, or + a combination of those. The socket must be in blocking mode; it can have a timeout, but the file object's internal buffer may end up in an inconsistent state if a timeout diff --git a/Doc/library/socketserver.rst b/Doc/library/socketserver.rst index 864b1dadb78562..f1f87ea975ca42 100644 --- a/Doc/library/socketserver.rst +++ b/Doc/library/socketserver.rst @@ -1,5 +1,5 @@ -:mod:`socketserver` --- A framework for network servers -======================================================= +:mod:`!socketserver` --- A framework for network servers +======================================================== .. module:: socketserver :synopsis: A framework for network servers. diff --git a/Doc/library/sqlite3.rst b/Doc/library/sqlite3.rst index e6961821b639b9..6da8798ddfe0c0 100644 --- a/Doc/library/sqlite3.rst +++ b/Doc/library/sqlite3.rst @@ -1,5 +1,5 @@ -:mod:`sqlite3` --- DB-API 2.0 interface for SQLite databases -============================================================ +:mod:`!sqlite3` --- DB-API 2.0 interface for SQLite databases +============================================================= .. module:: sqlite3 :synopsis: A DB-API 2.0 implementation using SQLite 3.x. 
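The :mod:`socket` hunk above clarifies that :meth:`~socket.socket.makefile` also accepts combinations of ``'r'``, ``'w'`` and ``'b'``; a small sketch with a local socket pair (names are illustrative)::

    import socket

    a, b = socket.socketpair()
    # 'rwb' combines the documented mode letters: a binary, readable and
    # writable file object; buffering=0 keeps it unbuffered.
    fa = a.makefile("rwb", buffering=0)
    fb = b.makefile("rb")

    fa.write(b"ping\n")
    print(fb.readline())      # b'ping\n'

    for obj in (fa, fb, a, b):
        obj.close()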
diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst index a90436286ca819..99abf45469018e 100644 --- a/Doc/library/ssl.rst +++ b/Doc/library/ssl.rst @@ -1,5 +1,5 @@ -:mod:`ssl` --- TLS/SSL wrapper for socket objects -================================================= +:mod:`!ssl` --- TLS/SSL wrapper for socket objects +================================================== .. module:: ssl :synopsis: TLS/SSL wrapper for socket objects @@ -781,7 +781,7 @@ Constants .. data:: OP_SINGLE_DH_USE - Prevents re-use of the same DH key for distinct SSL sessions. This + Prevents reuse of the same DH key for distinct SSL sessions. This improves forward secrecy but requires more computational resources. This option only applies to server sockets. @@ -789,7 +789,7 @@ Constants .. data:: OP_SINGLE_ECDH_USE - Prevents re-use of the same ECDH key for distinct SSL sessions. This + Prevents reuse of the same ECDH key for distinct SSL sessions. This improves forward secrecy but requires more computational resources. This option only applies to server sockets. diff --git a/Doc/library/stat.rst b/Doc/library/stat.rst index f7a3b7b16fe5c3..8434b2e8c75cf4 100644 --- a/Doc/library/stat.rst +++ b/Doc/library/stat.rst @@ -1,5 +1,5 @@ -:mod:`stat` --- Interpreting :func:`~os.stat` results -===================================================== +:mod:`!stat` --- Interpreting :func:`~os.stat` results +====================================================== .. module:: stat :synopsis: Utilities for interpreting the results of os.stat(), diff --git a/Doc/library/statistics.rst b/Doc/library/statistics.rst index d5a316e45ee3e2..8453135d2e164d 100644 --- a/Doc/library/statistics.rst +++ b/Doc/library/statistics.rst @@ -1,5 +1,5 @@ -:mod:`statistics` --- Mathematical statistics functions -======================================================= +:mod:`!statistics` --- Mathematical statistics functions +======================================================== .. module:: statistics :synopsis: Mathematical statistics functions @@ -220,7 +220,7 @@ However, for reading convenience, most of the examples show sorted sequences. .. function:: harmonic_mean(data, weights=None) Return the harmonic mean of *data*, a sequence or iterable of - real-valued numbers. If *weights* is omitted or *None*, then + real-valued numbers. If *weights* is omitted or ``None``, then equal weighting is assumed. The harmonic mean is the reciprocal of the arithmetic :func:`mean` of the diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst index 8f5f9ee519a573..ca4c949aafced0 100644 --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -1768,7 +1768,7 @@ expression support in the :mod:`re` module). cases. -.. method:: str.format_map(mapping) +.. method:: str.format_map(mapping, /) Similar to ``str.format(**mapping)``, except that ``mapping`` is used directly and not copied to a :class:`dict`. This is useful @@ -4565,7 +4565,7 @@ can be used interchangeably to index the same dictionary entry. Return a shallow copy of the dictionary. - .. classmethod:: fromkeys(iterable[, value]) + .. classmethod:: fromkeys(iterable, value=None) Create a new dictionary with keys from *iterable* and values set to *value*. @@ -4575,7 +4575,7 @@ can be used interchangeably to index the same dictionary entry. such as an empty list. To get distinct values, use a :ref:`dict comprehension ` instead. - .. method:: get(key[, default]) + .. method:: get(key, default=None) Return the value for *key* if *key* is in the dictionary, else *default*. 
If *default* is not given, it defaults to ``None``, so that this method @@ -4617,7 +4617,7 @@ can be used interchangeably to index the same dictionary entry. .. versionadded:: 3.8 - .. method:: setdefault(key[, default]) + .. method:: setdefault(key, default=None) If *key* is in the dictionary, return its value. If not, insert *key* with a value of *default* and return *default*. *default* defaults to diff --git a/Doc/library/string.rst b/Doc/library/string.rst index 1867678b2077fc..c3c0d732cf18d4 100644 --- a/Doc/library/string.rst +++ b/Doc/library/string.rst @@ -1,5 +1,5 @@ -:mod:`string` --- Common string operations -========================================== +:mod:`!string` --- Common string operations +=========================================== .. module:: string :synopsis: Common string operations. diff --git a/Doc/library/stringprep.rst b/Doc/library/stringprep.rst index c6d78a356d97bc..37d5adf0fa9541 100644 --- a/Doc/library/stringprep.rst +++ b/Doc/library/stringprep.rst @@ -1,5 +1,5 @@ -:mod:`stringprep` --- Internet String Preparation -================================================= +:mod:`!stringprep` --- Internet String Preparation +================================================== .. module:: stringprep :synopsis: String preparation, as per RFC 3453 diff --git a/Doc/library/struct.rst b/Doc/library/struct.rst index 3e507c1c7e7c85..a2c293443e23d3 100644 --- a/Doc/library/struct.rst +++ b/Doc/library/struct.rst @@ -1,5 +1,5 @@ -:mod:`struct` --- Interpret bytes as packed binary data -======================================================= +:mod:`!struct` --- Interpret bytes as packed binary data +======================================================== .. testsetup:: * diff --git a/Doc/library/subprocess.rst b/Doc/library/subprocess.rst index bd35fda7d79225..3a2178e2b7b839 100644 --- a/Doc/library/subprocess.rst +++ b/Doc/library/subprocess.rst @@ -1,5 +1,5 @@ -:mod:`subprocess` --- Subprocess management -=========================================== +:mod:`!subprocess` --- Subprocess management +============================================ .. module:: subprocess :synopsis: Subprocess management. @@ -52,7 +52,7 @@ underlying :class:`Popen` interface can be used directly. If *capture_output* is true, stdout and stderr will be captured. When used, the internal :class:`Popen` object is automatically created with - *stdout* and *stdin* both set to :data:`~subprocess.PIPE`. + *stdout* and *stderr* both set to :data:`~subprocess.PIPE`. The *stdout* and *stderr* arguments may not be supplied at the same time as *capture_output*. If you wish to capture and combine both streams into one, set *stdout* to :data:`~subprocess.PIPE` diff --git a/Doc/library/symtable.rst b/Doc/library/symtable.rst index 47568387f9a7ce..0480502158433a 100644 --- a/Doc/library/symtable.rst +++ b/Doc/library/symtable.rst @@ -1,5 +1,5 @@ -:mod:`symtable` --- Access to the compiler's symbol tables -========================================================== +:mod:`!symtable` --- Access to the compiler's symbol tables +=========================================================== .. module:: symtable :synopsis: Interface to the compiler's internal symbol tables. 
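The :mod:`subprocess` hunk above corrects the *capture_output* description: it is *stdout* and *stderr* that are both set to :data:`~subprocess.PIPE`. A minimal sketch::

    import subprocess
    import sys

    # capture_output=True wires stdout=PIPE and stderr=PIPE on the Popen object.
    result = subprocess.run(
        [sys.executable, "-c",
         "import sys; print('out'); print('err', file=sys.stderr)"],
        capture_output=True,
        text=True,
    )
    print(repr(result.stdout))   # 'out\n'
    print(repr(result.stderr))   # 'err\n'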
diff --git a/Doc/library/sys.monitoring.rst b/Doc/library/sys.monitoring.rst index 0e0095e108e9c0..0fa06da522049f 100644 --- a/Doc/library/sys.monitoring.rst +++ b/Doc/library/sys.monitoring.rst @@ -1,5 +1,5 @@ -:mod:`sys.monitoring` --- Execution event monitoring -==================================================== +:mod:`!sys.monitoring` --- Execution event monitoring +===================================================== .. module:: sys.monitoring :synopsis: Access and control event monitoring @@ -160,7 +160,7 @@ events, use the expression ``PY_RETURN | PY_START``. .. monitoring-event:: NO_EVENTS - An alias for ``0`` so users can do explict comparisions like:: + An alias for ``0`` so users can do explicit comparisons like:: if get_events(DEBUGGER_ID) == NO_EVENTS: ... diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst index 19a396d2300011..ed809d04167ffd 100644 --- a/Doc/library/sys.rst +++ b/Doc/library/sys.rst @@ -1,5 +1,5 @@ -:mod:`sys` --- System-specific parameters and functions -======================================================= +:mod:`!sys` --- System-specific parameters and functions +======================================================== .. module:: sys :synopsis: Access system-specific parameters and functions. @@ -1708,7 +1708,7 @@ always available. contain a tuple of (filename, line number, function name) tuples describing the traceback where the coroutine object was created, with the most recent call first. When disabled, ``cr_origin`` will - be None. + be ``None``. To enable, pass a *depth* value greater than zero; this sets the number of frames whose information will be captured. To disable, diff --git a/Doc/library/sysconfig.rst b/Doc/library/sysconfig.rst index 2faab212e46eff..9556da808f8c63 100644 --- a/Doc/library/sysconfig.rst +++ b/Doc/library/sysconfig.rst @@ -1,5 +1,5 @@ -:mod:`sysconfig` --- Provide access to Python's configuration information -========================================================================= +:mod:`!sysconfig` --- Provide access to Python's configuration information +========================================================================== .. module:: sysconfig :synopsis: Python's configuration information diff --git a/Doc/library/syslog.rst b/Doc/library/syslog.rst index 30bf3f09a24d42..548898a37bc6ea 100644 --- a/Doc/library/syslog.rst +++ b/Doc/library/syslog.rst @@ -1,5 +1,5 @@ -:mod:`syslog` --- Unix syslog library routines -============================================== +:mod:`!syslog` --- Unix syslog library routines +=============================================== .. module:: syslog :platform: Unix diff --git a/Doc/library/tabnanny.rst b/Doc/library/tabnanny.rst index dfe688a2f93e0c..4f61b3dd761400 100644 --- a/Doc/library/tabnanny.rst +++ b/Doc/library/tabnanny.rst @@ -1,5 +1,5 @@ -:mod:`tabnanny` --- Detection of ambiguous indentation -====================================================== +:mod:`!tabnanny` --- Detection of ambiguous indentation +======================================================= .. module:: tabnanny :synopsis: Tool for detecting white space related problems in Python diff --git a/Doc/library/tarfile.rst b/Doc/library/tarfile.rst index cf21dee83e6e7a..5b624f3533136f 100644 --- a/Doc/library/tarfile.rst +++ b/Doc/library/tarfile.rst @@ -1,5 +1,5 @@ -:mod:`tarfile` --- Read and write tar archive files -=================================================== +:mod:`!tarfile` --- Read and write tar archive files +==================================================== .. 
module:: tarfile :synopsis: Read and write tar-format archive files. diff --git a/Doc/library/tempfile.rst b/Doc/library/tempfile.rst index 9add8500c7788c..f0a81a093b435b 100644 --- a/Doc/library/tempfile.rst +++ b/Doc/library/tempfile.rst @@ -1,5 +1,5 @@ -:mod:`tempfile` --- Generate temporary files and directories -============================================================ +:mod:`!tempfile` --- Generate temporary files and directories +============================================================= .. module:: tempfile :synopsis: Generate temporary files and directories. diff --git a/Doc/library/termios.rst b/Doc/library/termios.rst index 57705ddc4e6470..0c6f3059fe71d1 100644 --- a/Doc/library/termios.rst +++ b/Doc/library/termios.rst @@ -1,5 +1,5 @@ -:mod:`termios` --- POSIX style tty control -========================================== +:mod:`!termios` --- POSIX style tty control +=========================================== .. module:: termios :platform: Unix diff --git a/Doc/library/test.rst b/Doc/library/test.rst index 92d675b48690ff..2a61f0aaef2b4c 100644 --- a/Doc/library/test.rst +++ b/Doc/library/test.rst @@ -1,5 +1,5 @@ -:mod:`test` --- Regression tests package for Python -=================================================== +:mod:`!test` --- Regression tests package for Python +==================================================== .. module:: test :synopsis: Regression tests package containing the testing suite for Python. @@ -324,7 +324,7 @@ The :mod:`test.support` module defines the following constants: .. data:: Py_DEBUG - True if Python was built with the :c:macro:`Py_DEBUG` macro + ``True`` if Python was built with the :c:macro:`Py_DEBUG` macro defined, that is, if Python was :ref:`built in debug mode `. diff --git a/Doc/library/textwrap.rst b/Doc/library/textwrap.rst index 7445410f91808c..a58b460fef409c 100644 --- a/Doc/library/textwrap.rst +++ b/Doc/library/textwrap.rst @@ -1,5 +1,5 @@ -:mod:`textwrap` --- Text wrapping and filling -============================================= +:mod:`!textwrap` --- Text wrapping and filling +============================================== .. module:: textwrap :synopsis: Text wrapping and filling @@ -154,7 +154,7 @@ hyphenated words; only then will long words be broken if necessary, unless wrapper = TextWrapper() wrapper.initial_indent = "* " - You can re-use the same :class:`TextWrapper` object many times, and you can + You can reuse the same :class:`TextWrapper` object many times, and you can change any of its options through direct assignment to instance attributes between uses. diff --git a/Doc/library/threading.rst b/Doc/library/threading.rst index 4cf98a49e11442..7b259e22dc7124 100644 --- a/Doc/library/threading.rst +++ b/Doc/library/threading.rst @@ -1,5 +1,5 @@ -:mod:`threading` --- Thread-based parallelism -============================================= +:mod:`!threading` --- Thread-based parallelism +============================================== .. module:: threading :synopsis: Thread-based parallelism. @@ -598,14 +598,25 @@ and "recursion level" in addition to the locked/unlocked state used by primitive locks. In the locked state, some thread owns the lock; in the unlocked state, no thread owns it. -To lock the lock, a thread calls its :meth:`~RLock.acquire` method; this -returns once the thread owns the lock. To unlock the lock, a thread calls -its :meth:`~Lock.release` method. 
:meth:`~Lock.acquire`/:meth:`~Lock.release` -call pairs may be nested; only the final :meth:`~Lock.release` (the -:meth:`~Lock.release` of the outermost pair) resets the lock to unlocked and -allows another thread blocked in :meth:`~Lock.acquire` to proceed. +Threads call a lock's :meth:`~RLock.acquire` method to lock it, +and its :meth:`~Lock.release` method to unlock it. -Reentrant locks also support the :ref:`context management protocol `. +.. note:: + + Reentrant locks support the :ref:`context management protocol `, + so it is recommended to use :keyword:`with` instead of manually calling + :meth:`~RLock.acquire` and :meth:`~RLock.release` + to handle acquiring and releasing the lock for a block of code. + +RLock's :meth:`~RLock.acquire`/:meth:`~RLock.release` call pairs may be nested, +unlike Lock's :meth:`~Lock.acquire`/:meth:`~Lock.release`. Only the final +:meth:`~RLock.release` (the :meth:`~Lock.release` of the outermost pair) resets +the lock to an unlocked state and allows another thread blocked in +:meth:`~RLock.acquire` to proceed. + +:meth:`~RLock.acquire`/:meth:`~RLock.release` must be used in pairs: each acquire +must have a release in the thread that has acquired the lock. Failing to +call release as many times as the lock has been acquired can lead to deadlock. .. class:: RLock() @@ -624,25 +635,41 @@ Reentrant locks also support the :ref:`context management protocol ` Acquire a lock, blocking or non-blocking. - When invoked without arguments: if this thread already owns the lock, increment - the recursion level by one, and return immediately. Otherwise, if another - thread owns the lock, block until the lock is unlocked. Once the lock is - unlocked (not owned by any thread), then grab ownership, set the recursion level - to one, and return. If more than one thread is blocked waiting until the lock - is unlocked, only one at a time will be able to grab ownership of the lock. - There is no return value in this case. + .. seealso:: - When invoked with the *blocking* argument set to ``True``, do the same thing as when - called without arguments, and return ``True``. + :ref:`Using RLock as a context manager ` + Recommended over manual :meth:`!acquire` and :meth:`release` calls + whenever practical. - When invoked with the *blocking* argument set to ``False``, do not block. If a call - without an argument would block, return ``False`` immediately; otherwise, do the - same thing as when called without arguments, and return ``True``. - When invoked with the floating-point *timeout* argument set to a positive - value, block for at most the number of seconds specified by *timeout* - and as long as the lock cannot be acquired. Return ``True`` if the lock has - been acquired, ``False`` if the timeout has elapsed. + When invoked with the *blocking* argument set to ``True`` (the default): + + * If no thread owns the lock, acquire the lock and return immediately. + + * If another thread owns the lock, block until we are able to acquire + the lock, or until *timeout* elapses, if it is set to a positive float value. + + * If the same thread owns the lock, acquire the lock again, and + return immediately. This is the difference between :class:`Lock` and + :class:`!RLock`; :class:`Lock` handles this case the same as the previous, + blocking until the lock can be acquired. + + When invoked with the *blocking* argument set to ``False``: + + * If no thread owns the lock, acquire the lock and return immediately. + + * If another thread owns the lock, return immediately.
+ + * If the same thread owns the lock, acquire the lock again and return + immediately. + + In all cases, if the thread was able to acquire the lock, return ``True``. + If the thread was unable to acquire the lock (i.e. if not blocking or + the timeout was reached) return ``False``. + + If called multiple times, failing to call :meth:`~RLock.release` as many times + may lead to deadlock. Consider using :class:`!RLock` as a context manager rather than + calling acquire/release directly. .. versionchanged:: 3.2 The *timeout* parameter is new. @@ -658,7 +685,7 @@ Reentrant locks also support the :ref:`context management protocol ` Only call this method when the calling thread owns the lock. A :exc:`RuntimeError` is raised if this method is called when the lock is - unlocked. + not acquired. There is no return value. diff --git a/Doc/library/time.rst b/Doc/library/time.rst index d79ca6e1208107..4d7661715aa0af 100644 --- a/Doc/library/time.rst +++ b/Doc/library/time.rst @@ -1,5 +1,5 @@ -:mod:`time` --- Time access and conversions -=========================================== +:mod:`!time` --- Time access and conversions +============================================ .. module:: time :synopsis: Time access and conversions. @@ -617,7 +617,7 @@ Functions - range [1, 12] * - 2 - - .. attribute:: tm_day + - .. attribute:: tm_mday - range [1, 31] * - 3 diff --git a/Doc/library/timeit.rst b/Doc/library/timeit.rst index 616f8365b80f6c..548a3ee0540506 100644 --- a/Doc/library/timeit.rst +++ b/Doc/library/timeit.rst @@ -1,5 +1,5 @@ -:mod:`timeit` --- Measure execution time of small code snippets -=============================================================== +:mod:`!timeit` --- Measure execution time of small code snippets +================================================================ .. module:: timeit :synopsis: Measure the execution time of small code snippets. diff --git a/Doc/library/tkinter.colorchooser.rst b/Doc/library/tkinter.colorchooser.rst index 6e8479c1dea1e2..df2b324fd5d3a7 100644 --- a/Doc/library/tkinter.colorchooser.rst +++ b/Doc/library/tkinter.colorchooser.rst @@ -1,5 +1,5 @@ -:mod:`tkinter.colorchooser` --- Color choosing dialog -===================================================== +:mod:`!tkinter.colorchooser` --- Color choosing dialog +====================================================== .. module:: tkinter.colorchooser :platform: Tk diff --git a/Doc/library/tkinter.dnd.rst b/Doc/library/tkinter.dnd.rst index 02de0fd331958d..62298d96c26459 100644 --- a/Doc/library/tkinter.dnd.rst +++ b/Doc/library/tkinter.dnd.rst @@ -1,5 +1,5 @@ -:mod:`tkinter.dnd` --- Drag and drop support -============================================ +:mod:`!tkinter.dnd` --- Drag and drop support +============================================= .. module:: tkinter.dnd :platform: Tk @@ -25,8 +25,8 @@ Selection of a target object occurs as follows: #. Top-down search of area under mouse for target widget * Target widget should have a callable *dnd_accept* attribute - * If *dnd_accept* is not present or returns None, search moves to parent widget - * If no target widget is found, then the target object is None + * If *dnd_accept* is not present or returns ``None``, search moves to parent widget + * If no target widget is found, then the target object is ``None`` 2. Call to *.dnd_leave(source, event)* #. 
Call to *.dnd_enter(source, event)* diff --git a/Doc/library/tkinter.font.rst b/Doc/library/tkinter.font.rst index c7c2b7b566cf8f..ed01bd5f483943 100644 --- a/Doc/library/tkinter.font.rst +++ b/Doc/library/tkinter.font.rst @@ -1,5 +1,5 @@ -:mod:`tkinter.font` --- Tkinter font wrapper -============================================ +:mod:`!tkinter.font` --- Tkinter font wrapper +============================================= .. module:: tkinter.font :platform: Tk diff --git a/Doc/library/tkinter.messagebox.rst b/Doc/library/tkinter.messagebox.rst index 56090a0a0e424b..0dc9632ca73304 100644 --- a/Doc/library/tkinter.messagebox.rst +++ b/Doc/library/tkinter.messagebox.rst @@ -1,5 +1,5 @@ -:mod:`tkinter.messagebox` --- Tkinter message prompts -===================================================== +:mod:`!tkinter.messagebox` --- Tkinter message prompts +====================================================== .. module:: tkinter.messagebox :platform: Tk diff --git a/Doc/library/tkinter.rst b/Doc/library/tkinter.rst index 7e5dee1b562df8..f40790c1175800 100644 --- a/Doc/library/tkinter.rst +++ b/Doc/library/tkinter.rst @@ -1,5 +1,5 @@ -:mod:`tkinter` --- Python interface to Tcl/Tk -============================================= +:mod:`!tkinter` --- Python interface to Tcl/Tk +============================================== .. module:: tkinter :synopsis: Interface to Tcl/Tk for graphical user interfaces diff --git a/Doc/library/tkinter.scrolledtext.rst b/Doc/library/tkinter.scrolledtext.rst index d20365baa38690..763e24929d74b5 100644 --- a/Doc/library/tkinter.scrolledtext.rst +++ b/Doc/library/tkinter.scrolledtext.rst @@ -1,5 +1,5 @@ -:mod:`tkinter.scrolledtext` --- Scrolled Text Widget -==================================================== +:mod:`!tkinter.scrolledtext` --- Scrolled Text Widget +===================================================== .. module:: tkinter.scrolledtext :platform: Tk diff --git a/Doc/library/tkinter.ttk.rst b/Doc/library/tkinter.ttk.rst index bd0d8b3799a0f1..628e9f945ac365 100644 --- a/Doc/library/tkinter.ttk.rst +++ b/Doc/library/tkinter.ttk.rst @@ -1,5 +1,5 @@ -:mod:`tkinter.ttk` --- Tk themed widgets -======================================== +:mod:`!tkinter.ttk` --- Tk themed widgets +========================================= .. module:: tkinter.ttk :synopsis: Tk themed widget set diff --git a/Doc/library/token.rst b/Doc/library/token.rst index e6dc37d7ad852c..919ff590b72916 100644 --- a/Doc/library/token.rst +++ b/Doc/library/token.rst @@ -1,5 +1,5 @@ -:mod:`token` --- Constants used with Python parse trees -======================================================= +:mod:`!token` --- Constants used with Python parse trees +======================================================== .. module:: token :synopsis: Constants representing terminal nodes of the parse tree. diff --git a/Doc/library/tokenize.rst b/Doc/library/tokenize.rst index 92bdb052267a68..f719319a302a23 100644 --- a/Doc/library/tokenize.rst +++ b/Doc/library/tokenize.rst @@ -1,5 +1,5 @@ -:mod:`tokenize` --- Tokenizer for Python source -=============================================== +:mod:`!tokenize` --- Tokenizer for Python source +================================================ .. module:: tokenize :synopsis: Lexical scanner for Python source code. 
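The :mod:`threading` hunk above recommends the :keyword:`with` statement over manual :meth:`~RLock.acquire`/:meth:`~RLock.release` pairs for :class:`RLock`; a short sketch of the nested, re-entrant use it describes (the function names are illustrative)::

    import threading

    rlock = threading.RLock()

    def refresh():
        with rlock:           # re-entered from update(): recursion level goes to 2
            pass              # ... touch shared state ...

    def update():
        with rlock:           # recursion level 1; released automatically on exit
            refresh()

    update()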
diff --git a/Doc/library/tomllib.rst b/Doc/library/tomllib.rst index 406985b84471f2..b523ad93b35f9d 100644 --- a/Doc/library/tomllib.rst +++ b/Doc/library/tomllib.rst @@ -1,5 +1,5 @@ -:mod:`tomllib` --- Parse TOML files -=================================== +:mod:`!tomllib` --- Parse TOML files +==================================== .. module:: tomllib :synopsis: Parse TOML files. diff --git a/Doc/library/trace.rst b/Doc/library/trace.rst index 8854905e192b45..cae94ea08e17e5 100644 --- a/Doc/library/trace.rst +++ b/Doc/library/trace.rst @@ -1,5 +1,5 @@ -:mod:`trace` --- Trace or track Python statement execution -========================================================== +:mod:`!trace` --- Trace or track Python statement execution +=========================================================== .. module:: trace :synopsis: Trace or track Python statement execution. diff --git a/Doc/library/traceback.rst b/Doc/library/traceback.rst index ab83e0df10b709..bfd2c3efc4b1f6 100644 --- a/Doc/library/traceback.rst +++ b/Doc/library/traceback.rst @@ -1,5 +1,5 @@ -:mod:`traceback` --- Print or retrieve a stack traceback -======================================================== +:mod:`!traceback` --- Print or retrieve a stack traceback +========================================================= .. module:: traceback :synopsis: Print or retrieve a stack traceback. @@ -473,7 +473,7 @@ in a :ref:`traceback `. attribute accessed (which also happens when casting it to a :class:`tuple`). :attr:`~FrameSummary.line` may be directly provided, and will prevent line lookups happening at all. *locals* is an optional local variable - dictionary, and if supplied the variable representations are stored in the + mapping, and if supplied the variable representations are stored in the summary for later display. :class:`!FrameSummary` instances have the following attributes: diff --git a/Doc/library/tracemalloc.rst b/Doc/library/tracemalloc.rst index 68432aeaecbcc1..2370d927292eb0 100644 --- a/Doc/library/tracemalloc.rst +++ b/Doc/library/tracemalloc.rst @@ -1,5 +1,5 @@ -:mod:`tracemalloc` --- Trace memory allocations -=============================================== +:mod:`!tracemalloc` --- Trace memory allocations +================================================ .. module:: tracemalloc :synopsis: Trace memory allocations. diff --git a/Doc/library/tty.rst b/Doc/library/tty.rst index ed63561c40de24..37778bf20bdcc7 100644 --- a/Doc/library/tty.rst +++ b/Doc/library/tty.rst @@ -1,5 +1,5 @@ -:mod:`tty` --- Terminal control functions -========================================= +:mod:`!tty` --- Terminal control functions +========================================== .. module:: tty :platform: Unix @@ -53,7 +53,7 @@ The :mod:`tty` module defines the following functions: is saved before setting *fd* to raw mode; this value is returned. .. versionchanged:: 3.12 - The return value is now the original tty attributes, instead of None. + The return value is now the original tty attributes, instead of ``None``. .. function:: setcbreak(fd, when=termios.TCSAFLUSH) @@ -67,7 +67,7 @@ The :mod:`tty` module defines the following functions: the minimum input to 1 byte with no delay. .. versionchanged:: 3.12 - The return value is now the original tty attributes, instead of None. + The return value is now the original tty attributes, instead of ``None``. .. versionchanged:: 3.12.2 The ``ICRNL`` flag is no longer cleared. 
This restores the behavior diff --git a/Doc/library/turtle.rst b/Doc/library/turtle.rst index 2941201332a715..afda3685d606bb 100644 --- a/Doc/library/turtle.rst +++ b/Doc/library/turtle.rst @@ -120,7 +120,7 @@ off-screen):: home() The home position is at the center of the turtle's screen. If you ever need to -know them, get the turtle's x-y co-ordinates with:: +know them, get the turtle's x-y coordinates with:: pos() diff --git a/Doc/library/types.rst b/Doc/library/types.rst index 89bc0a600c0af8..116868c24be864 100644 --- a/Doc/library/types.rst +++ b/Doc/library/types.rst @@ -1,5 +1,5 @@ -:mod:`types` --- Dynamic type creation and names for built-in types -=================================================================== +:mod:`!types` --- Dynamic type creation and names for built-in types +==================================================================== .. module:: types :synopsis: Names for built-in types. diff --git a/Doc/library/typing.rst b/Doc/library/typing.rst index f53080e0610cb1..14dd9f6b264184 100644 --- a/Doc/library/typing.rst +++ b/Doc/library/typing.rst @@ -1780,7 +1780,7 @@ without the dedicated syntax, as documented below. .. _typevartuple: -.. class:: TypeVarTuple(name, default=typing.NoDefault) +.. class:: TypeVarTuple(name, *, default=typing.NoDefault) Type variable tuple. A specialized form of :ref:`type variable ` that enables *variadic* generics. @@ -2498,7 +2498,7 @@ types. This attribute reflects *only* the value of the ``total`` argument to the current ``TypedDict`` class, not whether the class is semantically - total. For example, a ``TypedDict`` with ``__total__`` set to True may + total. For example, a ``TypedDict`` with ``__total__`` set to ``True`` may have keys marked with :data:`NotRequired`, or it may inherit from another ``TypedDict`` with ``total=False``. Therefore, it is generally better to use :attr:`__required_keys__` and :attr:`__optional_keys__` for introspection. @@ -2546,7 +2546,7 @@ types. ``__required_keys__`` and ``__optional_keys__`` rely on may not work properly, and the values of the attributes may be incorrect. - Support for :data:`ReadOnly` is reflected in the following attributes:: + Support for :data:`ReadOnly` is reflected in the following attributes: .. attribute:: __readonly_keys__ @@ -3080,35 +3080,37 @@ Introspection helpers Return a dictionary containing type hints for a function, method, module or class object. - This is often the same as ``obj.__annotations__``. In addition, - forward references encoded as string literals are handled by evaluating - them in ``globals``, ``locals`` and (where applicable) - :ref:`type parameter ` namespaces. - For a class ``C``, return - a dictionary constructed by merging all the ``__annotations__`` along - ``C.__mro__`` in reverse order. - - The function recursively replaces all ``Annotated[T, ...]`` with ``T``, - unless ``include_extras`` is set to ``True`` (see :class:`Annotated` for - more information). For example: - - .. 
testcode:: - - class Student(NamedTuple): - name: Annotated[str, 'some marker'] - - assert get_type_hints(Student) == {'name': str} - assert get_type_hints(Student, include_extras=False) == {'name': str} - assert get_type_hints(Student, include_extras=True) == { - 'name': Annotated[str, 'some marker'] - } + This is often the same as ``obj.__annotations__``, but this function makes + the following changes to the annotations dictionary: + + * Forward references encoded as string literals or :class:`ForwardRef` + objects are handled by evaluating them in *globalns*, *localns*, and + (where applicable) *obj*'s :ref:`type parameter ` namespace. + If *globalns* or *localns* is not given, appropriate namespace + dictionaries are inferred from *obj*. + * ``None`` is replaced with :class:`types.NoneType`. + * If :func:`@no_type_check ` has been applied to *obj*, an + empty dictionary is returned. + * If *obj* is a class ``C``, the function returns a dictionary that merges + annotations from ``C``'s base classes with those on ``C`` directly. This + is done by traversing ``C.__mro__`` and iteratively combining + ``__annotations__`` dictionaries. Annotations on classes appearing + earlier in the :term:`method resolution order` always take precedence over + annotations on classes appearing later in the method resolution order. + * The function recursively replaces all occurrences of ``Annotated[T, ...]`` + with ``T``, unless *include_extras* is set to ``True`` (see + :class:`Annotated` for more information). + + See also :func:`inspect.get_annotations`, a lower-level function that + returns annotations more directly. .. note:: - :func:`get_type_hints` does not work with imported - :ref:`type aliases ` that include forward references. - Enabling postponed evaluation of annotations (:pep:`563`) may remove - the need for most forward references. + If any forward references in the annotations of *obj* are not resolvable + or are not valid Python code, this function will raise an exception + such as :exc:`NameError`. For example, this can happen with imported + :ref:`type aliases ` that include forward references, + or with names imported under :data:`if TYPE_CHECKING `. .. versionchanged:: 3.9 Added ``include_extras`` parameter as part of :pep:`593`. diff --git a/Doc/library/unicodedata.rst b/Doc/library/unicodedata.rst index 7db47d48022a0e..37dc37513fa15d 100644 --- a/Doc/library/unicodedata.rst +++ b/Doc/library/unicodedata.rst @@ -1,5 +1,5 @@ -:mod:`unicodedata` --- Unicode Database -======================================= +:mod:`!unicodedata` --- Unicode Database +======================================== .. module:: unicodedata :synopsis: Access the Unicode Database. diff --git a/Doc/library/unittest.mock-examples.rst b/Doc/library/unittest.mock-examples.rst index f2bdde80bdbd64..00cc9bfc0a5f2b 100644 --- a/Doc/library/unittest.mock-examples.rst +++ b/Doc/library/unittest.mock-examples.rst @@ -1,5 +1,5 @@ -:mod:`unittest.mock` --- getting started -======================================== +:mod:`!unittest.mock` --- getting started +========================================= .. moduleauthor:: Michael Foord .. 
currentmodule:: unittest.mock diff --git a/Doc/library/unittest.mock.rst b/Doc/library/unittest.mock.rst index ee4c7b2ed252b0..d8ba24c3146cf2 100644 --- a/Doc/library/unittest.mock.rst +++ b/Doc/library/unittest.mock.rst @@ -1,6 +1,5 @@ - -:mod:`unittest.mock` --- mock object library -============================================ +:mod:`!unittest.mock` --- mock object library +============================================= .. module:: unittest.mock :synopsis: Mock object library. @@ -415,13 +414,13 @@ the *new_callable* argument to :func:`patch`. This can be useful where you want to make a series of assertions that reuse the same object. Note that :meth:`reset_mock` *doesn't* clear the - return value, :attr:`side_effect` or any child attributes you have + :attr:`return_value`, :attr:`side_effect` or any child attributes you have set using normal assignment by default. In case you want to reset - *return_value* or :attr:`side_effect`, then pass the corresponding + :attr:`return_value` or :attr:`side_effect`, then pass the corresponding parameter as ``True``. Child mocks and the return value mock (if any) are reset as well. - .. note:: *return_value*, and :attr:`side_effect` are keyword-only + .. note:: *return_value*, and *side_effect* are keyword-only arguments. @@ -2585,40 +2584,16 @@ called incorrectly. Before I explain how auto-speccing works, here's why it is needed. -:class:`Mock` is a very powerful and flexible object, but it suffers from two flaws -when used to mock out objects from a system under test. One of these flaws is -specific to the :class:`Mock` api and the other is a more general problem with using -mock objects. - -First the problem specific to :class:`Mock`. :class:`Mock` has two assert methods that are -extremely handy: :meth:`~Mock.assert_called_with` and -:meth:`~Mock.assert_called_once_with`. - - >>> mock = Mock(name='Thing', return_value=None) - >>> mock(1, 2, 3) - >>> mock.assert_called_once_with(1, 2, 3) - >>> mock(1, 2, 3) - >>> mock.assert_called_once_with(1, 2, 3) - Traceback (most recent call last): - ... - AssertionError: Expected 'mock' to be called once. Called 2 times. - -Because mocks auto-create attributes on demand, and allow you to call them -with arbitrary arguments, if you misspell one of these assert methods then -your assertion is gone: - -.. code-block:: pycon - - >>> mock = Mock(name='Thing', return_value=None) - >>> mock(1, 2, 3) - >>> mock.assret_called_once_with(4, 5, 6) # Intentional typo! +:class:`Mock` is a very powerful and flexible object, but it suffers from a flaw which +is general to mocking. If you refactor some of your code, rename members and so on, any +tests for code that is still using the *old api* but uses mocks instead of the real +objects will still pass. This means your tests can all pass even though your code is +broken. -Your tests can pass silently and incorrectly because of the typo. +.. versionchanged:: 3.5 -The second issue is more general to mocking. If you refactor some of your -code, rename members and so on, any tests for code that is still using the -*old api* but uses mocks instead of the real objects will still pass. This -means your tests can all pass even though your code is broken. + Before 3.5, tests with a typo in the word assert would silently pass when they should + raise an error. You can still achieve this behavior by passing ``unsafe=True`` to Mock. Note that this is another reason why you need integration tests as well as unit tests. 
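To make the auto-speccing rationale above concrete, here is a small sketch using a hypothetical ``Thing`` class (not part of this patch). With ``create_autospec``, both a misspelled assertion method and a call that no longer matches the real signature fail loudly instead of passing silently::

   from unittest.mock import create_autospec

   class Thing:
       def frobnicate(self, value):
           return value

   mock_thing = create_autospec(Thing, instance=True)
   mock_thing.frobnicate(42)
   mock_thing.frobnicate.assert_called_once_with(42)       # passes

   try:
       mock_thing.frobnicate.assret_called_once_with(42)   # typo is caught
   except AttributeError as exc:
       print("caught:", exc)

   try:
       mock_thing.frobnicate(1, 2, 3)                      # wrong signature is caught too
   except TypeError as exc:
       print("caught:", exc)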
Testing everything in isolation is all fine and dandy, but if you diff --git a/Doc/library/unittest.rst b/Doc/library/unittest.rst index 3af29f19c802c7..eb42210e096ecb 100644 --- a/Doc/library/unittest.rst +++ b/Doc/library/unittest.rst @@ -1,5 +1,5 @@ -:mod:`unittest` --- Unit testing framework -========================================== +:mod:`!unittest` --- Unit testing framework +=========================================== .. module:: unittest :synopsis: Unit testing framework for Python. diff --git a/Doc/library/urllib.error.rst b/Doc/library/urllib.error.rst index facb11f42a40c5..1686ddd09caa48 100644 --- a/Doc/library/urllib.error.rst +++ b/Doc/library/urllib.error.rst @@ -1,5 +1,5 @@ -:mod:`urllib.error` --- Exception classes raised by urllib.request -================================================================== +:mod:`!urllib.error` --- Exception classes raised by urllib.request +=================================================================== .. module:: urllib.error :synopsis: Exception classes raised by urllib.request. diff --git a/Doc/library/urllib.parse.rst b/Doc/library/urllib.parse.rst index 59fb14960ba9f6..cd402e87a8224b 100644 --- a/Doc/library/urllib.parse.rst +++ b/Doc/library/urllib.parse.rst @@ -1,5 +1,5 @@ -:mod:`urllib.parse` --- Parse URLs into components -================================================== +:mod:`!urllib.parse` --- Parse URLs into components +=================================================== .. module:: urllib.parse :synopsis: Parse URLs into or assemble them from components. diff --git a/Doc/library/urllib.request.rst b/Doc/library/urllib.request.rst index c1e60a46774704..754405e0fbe5b2 100644 --- a/Doc/library/urllib.request.rst +++ b/Doc/library/urllib.request.rst @@ -1,5 +1,5 @@ -:mod:`urllib.request` --- Extensible library for opening URLs -============================================================= +:mod:`!urllib.request` --- Extensible library for opening URLs +============================================================== .. module:: urllib.request :synopsis: Extensible library for opening URLs. @@ -218,7 +218,7 @@ The following classes are provided: An appropriate ``Content-Type`` header should be included if the *data* argument is present. If this header has not been provided and *data* - is not None, ``Content-Type: application/x-www-form-urlencoded`` will + is not ``None``, ``Content-Type: application/x-www-form-urlencoded`` will be added as a default. The next two arguments are only of interest for correct handling diff --git a/Doc/library/urllib.robotparser.rst b/Doc/library/urllib.robotparser.rst index b5a49d9c592387..016fcdc75da67a 100644 --- a/Doc/library/urllib.robotparser.rst +++ b/Doc/library/urllib.robotparser.rst @@ -1,5 +1,5 @@ -:mod:`urllib.robotparser` --- Parser for robots.txt -==================================================== +:mod:`!urllib.robotparser` --- Parser for robots.txt +===================================================== .. module:: urllib.robotparser :synopsis: Load a robots.txt file and answer questions about diff --git a/Doc/library/urllib.rst b/Doc/library/urllib.rst index 624e164625556a..7d9f39ef070fc3 100644 --- a/Doc/library/urllib.rst +++ b/Doc/library/urllib.rst @@ -1,5 +1,5 @@ -:mod:`urllib` --- URL handling modules -====================================== +:mod:`!urllib` --- URL handling modules +======================================= .. 
module:: urllib diff --git a/Doc/library/uuid.rst b/Doc/library/uuid.rst index e2d231da38fd9a..0f2d7820cb25c8 100644 --- a/Doc/library/uuid.rst +++ b/Doc/library/uuid.rst @@ -1,5 +1,5 @@ -:mod:`uuid` --- UUID objects according to :rfc:`4122` -===================================================== +:mod:`!uuid` --- UUID objects according to :rfc:`4122` +====================================================== .. module:: uuid :synopsis: UUID objects (universally unique identifiers) according to RFC 4122 diff --git a/Doc/library/venv.rst b/Doc/library/venv.rst index cdd1fde2e44b00..fff1075c2473eb 100644 --- a/Doc/library/venv.rst +++ b/Doc/library/venv.rst @@ -1,5 +1,5 @@ -:mod:`venv` --- Creation of virtual environments -================================================ +:mod:`!venv` --- Creation of virtual environments +================================================= .. module:: venv :synopsis: Creation of virtual environments. diff --git a/Doc/library/warnings.rst b/Doc/library/warnings.rst index 500398636e11ae..c66e65abee426f 100644 --- a/Doc/library/warnings.rst +++ b/Doc/library/warnings.rst @@ -1,5 +1,5 @@ -:mod:`warnings` --- Warning control -=================================== +:mod:`!warnings` --- Warning control +==================================== .. module:: warnings :synopsis: Issue warning messages and control their disposition. diff --git a/Doc/library/wave.rst b/Doc/library/wave.rst index 55b029bc742b24..89664693cc87b4 100644 --- a/Doc/library/wave.rst +++ b/Doc/library/wave.rst @@ -1,5 +1,5 @@ -:mod:`wave` --- Read and write WAV files -======================================== +:mod:`!wave` --- Read and write WAV files +========================================= .. module:: wave :synopsis: Provide an interface to the WAV sound format. diff --git a/Doc/library/webbrowser.rst b/Doc/library/webbrowser.rst index 3775d9f9245428..334f21f01c14c0 100644 --- a/Doc/library/webbrowser.rst +++ b/Doc/library/webbrowser.rst @@ -1,5 +1,5 @@ -:mod:`webbrowser` --- Convenient web-browser controller -======================================================= +:mod:`!webbrowser` --- Convenient web-browser controller +======================================================== .. module:: webbrowser :synopsis: Easy-to-use controller for web browsers. diff --git a/Doc/library/winreg.rst b/Doc/library/winreg.rst index 06bd4d87eb03c6..b3a824fb69a49f 100644 --- a/Doc/library/winreg.rst +++ b/Doc/library/winreg.rst @@ -1,5 +1,5 @@ -:mod:`winreg` --- Windows registry access -========================================= +:mod:`!winreg` --- Windows registry access +========================================== .. module:: winreg :platform: Windows diff --git a/Doc/library/winsound.rst b/Doc/library/winsound.rst index 370c5216652ba7..f7ca9dc57bbe28 100644 --- a/Doc/library/winsound.rst +++ b/Doc/library/winsound.rst @@ -1,5 +1,5 @@ -:mod:`winsound` --- Sound-playing interface for Windows -======================================================= +:mod:`!winsound` --- Sound-playing interface for Windows +======================================================== .. 
module:: winsound :platform: Windows diff --git a/Doc/library/wsgiref.rst b/Doc/library/wsgiref.rst index 7fe84a2de1fceb..e46730f1716761 100644 --- a/Doc/library/wsgiref.rst +++ b/Doc/library/wsgiref.rst @@ -1,5 +1,5 @@ -:mod:`wsgiref` --- WSGI Utilities and Reference Implementation -============================================================== +:mod:`!wsgiref` --- WSGI Utilities and Reference Implementation +=============================================================== .. module:: wsgiref :synopsis: WSGI Utilities and Reference Implementation. diff --git a/Doc/library/xml.dom.minidom.rst b/Doc/library/xml.dom.minidom.rst index 72a7a98c2ac4f2..00a18751207e7a 100644 --- a/Doc/library/xml.dom.minidom.rst +++ b/Doc/library/xml.dom.minidom.rst @@ -1,5 +1,5 @@ -:mod:`xml.dom.minidom` --- Minimal DOM implementation -===================================================== +:mod:`!xml.dom.minidom` --- Minimal DOM implementation +====================================================== .. module:: xml.dom.minidom :synopsis: Minimal Document Object Model (DOM) implementation. diff --git a/Doc/library/xml.dom.pulldom.rst b/Doc/library/xml.dom.pulldom.rst index 843c2fd7fdb937..fd96765cbe3c96 100644 --- a/Doc/library/xml.dom.pulldom.rst +++ b/Doc/library/xml.dom.pulldom.rst @@ -1,5 +1,5 @@ -:mod:`xml.dom.pulldom` --- Support for building partial DOM trees -================================================================= +:mod:`!xml.dom.pulldom` --- Support for building partial DOM trees +================================================================== .. module:: xml.dom.pulldom :synopsis: Support for building partial DOM trees from SAX events. diff --git a/Doc/library/xml.dom.rst b/Doc/library/xml.dom.rst index d0e1b248d595d1..f33b19bc2724d0 100644 --- a/Doc/library/xml.dom.rst +++ b/Doc/library/xml.dom.rst @@ -1,5 +1,5 @@ -:mod:`xml.dom` --- The Document Object Model API -================================================ +:mod:`!xml.dom` --- The Document Object Model API +================================================= .. module:: xml.dom :synopsis: Document Object Model API for Python. diff --git a/Doc/library/xml.etree.elementtree.rst b/Doc/library/xml.etree.elementtree.rst index 30a7b653f940e9..a6a9eb87f56d88 100644 --- a/Doc/library/xml.etree.elementtree.rst +++ b/Doc/library/xml.etree.elementtree.rst @@ -1,5 +1,5 @@ -:mod:`xml.etree.ElementTree` --- The ElementTree XML API -======================================================== +:mod:`!xml.etree.ElementTree` --- The ElementTree XML API +========================================================= .. module:: xml.etree.ElementTree :synopsis: Implementation of the ElementTree API. diff --git a/Doc/library/xml.sax.handler.rst b/Doc/library/xml.sax.handler.rst index e2f28e3244cb09..c2c9d6424b5072 100644 --- a/Doc/library/xml.sax.handler.rst +++ b/Doc/library/xml.sax.handler.rst @@ -1,5 +1,5 @@ -:mod:`xml.sax.handler` --- Base classes for SAX handlers -======================================================== +:mod:`!xml.sax.handler` --- Base classes for SAX handlers +========================================================= .. module:: xml.sax.handler :synopsis: Base classes for SAX event handlers. 
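Returning briefly to the ``urllib.request`` hunk above: a hedged sketch of the documented ``Content-Type`` default (the URLs are placeholders and nothing is actually sent here)::

   import urllib.parse
   import urllib.request

   payload = urllib.parse.urlencode({"q": "python"}).encode("ascii")

   # No Content-Type supplied: the opener adds
   # "Content-Type: application/x-www-form-urlencoded" when the request is sent.
   req = urllib.request.Request("https://www.example.com/search", data=payload)

   # Supplying the header explicitly instead, e.g. for a JSON body:
   req_json = urllib.request.Request(
       "https://www.example.com/api",
       data=b'{"q": "python"}',
       headers={"Content-Type": "application/json"},
   )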
diff --git a/Doc/library/xml.sax.reader.rst b/Doc/library/xml.sax.reader.rst index 113e9e93fb04ff..b0bc84062e0719 100644 --- a/Doc/library/xml.sax.reader.rst +++ b/Doc/library/xml.sax.reader.rst @@ -1,5 +1,5 @@ -:mod:`xml.sax.xmlreader` --- Interface for XML parsers -====================================================== +:mod:`!xml.sax.xmlreader` --- Interface for XML parsers +======================================================= .. module:: xml.sax.xmlreader :synopsis: Interface which SAX-compliant XML parsers must implement. diff --git a/Doc/library/xml.sax.rst b/Doc/library/xml.sax.rst index 6d351dfb4d7072..c60e9e505f7544 100644 --- a/Doc/library/xml.sax.rst +++ b/Doc/library/xml.sax.rst @@ -1,5 +1,5 @@ -:mod:`xml.sax` --- Support for SAX2 parsers -=========================================== +:mod:`!xml.sax` --- Support for SAX2 parsers +============================================ .. module:: xml.sax :synopsis: Package containing SAX2 base classes and convenience functions. diff --git a/Doc/library/xml.sax.utils.rst b/Doc/library/xml.sax.utils.rst index 3a524c9c0d5a9f..5ee11d58c3dd26 100644 --- a/Doc/library/xml.sax.utils.rst +++ b/Doc/library/xml.sax.utils.rst @@ -1,5 +1,5 @@ -:mod:`xml.sax.saxutils` --- SAX Utilities -========================================= +:mod:`!xml.sax.saxutils` --- SAX Utilities +========================================== .. module:: xml.sax.saxutils :synopsis: Convenience functions and classes for use with SAX. diff --git a/Doc/library/xmlrpc.client.rst b/Doc/library/xmlrpc.client.rst index f7f23007fb0522..614fb19d1f56b6 100644 --- a/Doc/library/xmlrpc.client.rst +++ b/Doc/library/xmlrpc.client.rst @@ -1,5 +1,5 @@ -:mod:`xmlrpc.client` --- XML-RPC client access -============================================== +:mod:`!xmlrpc.client` --- XML-RPC client access +=============================================== .. module:: xmlrpc.client :synopsis: XML-RPC client access. diff --git a/Doc/library/xmlrpc.server.rst b/Doc/library/xmlrpc.server.rst index ca1ea455f0acfc..06169c7eca8b0c 100644 --- a/Doc/library/xmlrpc.server.rst +++ b/Doc/library/xmlrpc.server.rst @@ -1,5 +1,5 @@ -:mod:`xmlrpc.server` --- Basic XML-RPC servers -============================================== +:mod:`!xmlrpc.server` --- Basic XML-RPC servers +=============================================== .. module:: xmlrpc.server :synopsis: Basic XML-RPC server implementations. diff --git a/Doc/library/zipapp.rst b/Doc/library/zipapp.rst index c8a059bdb1cb93..cf561b454e934f 100644 --- a/Doc/library/zipapp.rst +++ b/Doc/library/zipapp.rst @@ -1,5 +1,5 @@ -:mod:`zipapp` --- Manage executable Python zip archives -======================================================= +:mod:`!zipapp` --- Manage executable Python zip archives +======================================================== .. module:: zipapp :synopsis: Manage executable Python zip archives diff --git a/Doc/library/zipfile.rst b/Doc/library/zipfile.rst index 6192689c3a1194..5583c6b24be5c6 100644 --- a/Doc/library/zipfile.rst +++ b/Doc/library/zipfile.rst @@ -1,5 +1,5 @@ -:mod:`zipfile` --- Work with ZIP archives -========================================= +:mod:`!zipfile` --- Work with ZIP archives +========================================== .. module:: zipfile :synopsis: Read and write ZIP-format archive files. @@ -585,6 +585,15 @@ Path objects are traversable using the ``/`` operator or ``joinpath``. Return ``True`` if the current context references a file. +.. 
method:: Path.is_symlink() + + Return ``True`` if the current context references a symbolic link. + + .. versionadded:: 3.12 + + .. versionchanged:: 3.13 + Previously, ``is_symlink`` would unconditionally return ``False``. + .. method:: Path.exists() Return ``True`` if the current context references a file or diff --git a/Doc/library/zipimport.rst b/Doc/library/zipimport.rst index 7a8c837307e60a..9353a45bdcecba 100644 --- a/Doc/library/zipimport.rst +++ b/Doc/library/zipimport.rst @@ -1,5 +1,5 @@ -:mod:`zipimport` --- Import modules from Zip archives -===================================================== +:mod:`!zipimport` --- Import modules from Zip archives +====================================================== .. module:: zipimport :synopsis: Support for importing Python modules from ZIP archives. diff --git a/Doc/library/zlib.rst b/Doc/library/zlib.rst index ac179722dee2be..965b82a3daffb9 100644 --- a/Doc/library/zlib.rst +++ b/Doc/library/zlib.rst @@ -1,5 +1,5 @@ -:mod:`zlib` --- Compression compatible with :program:`gzip` -=========================================================== +:mod:`!zlib` --- Compression compatible with :program:`gzip` +============================================================ .. module:: zlib :synopsis: Low-level interface to compression and decompression routines diff --git a/Doc/library/zoneinfo.rst b/Doc/library/zoneinfo.rst index 54f1988375570c..a57f3b8b3e858c 100644 --- a/Doc/library/zoneinfo.rst +++ b/Doc/library/zoneinfo.rst @@ -1,5 +1,5 @@ -:mod:`zoneinfo` --- IANA time zone support -========================================== +:mod:`!zoneinfo` --- IANA time zone support +=========================================== .. module:: zoneinfo :synopsis: IANA time zone support diff --git a/Doc/license.rst b/Doc/license.rst index 814b6829f6b2bd..674ac5f56e6f97 100644 --- a/Doc/license.rst +++ b/Doc/license.rst @@ -1047,27 +1047,27 @@ https://www.w3.org/TR/xml-c14n2-testcases/ and is distributed under the mimalloc -------- -MIT License - -Copyright (c) 2018-2021 Microsoft Corporation, Daan Leijen - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
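Relating to the new ``zipfile.Path.is_symlink`` entry documented earlier in this hunk, a minimal usage sketch (the archive name and member path are hypothetical)::

   import zipfile

   with zipfile.ZipFile("archive.zip") as zf:
       entry = zipfile.Path(zf, "data/config.txt")
       # On 3.13 this reflects the actual entry type; on 3.12 it was always False.
       if entry.exists() and not entry.is_symlink():
           print(entry.read_text())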
+MIT License:: + + Copyright (c) 2018-2021 Microsoft Corporation, Daan Leijen + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. asyncio diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst index f5e87160732056..b78b38184f2c59 100644 --- a/Doc/reference/datamodel.rst +++ b/Doc/reference/datamodel.rst @@ -1240,7 +1240,7 @@ Methods on code objects The iterator returns :class:`tuple`\s containing the ``(start_line, end_line, start_column, end_column)``. The *i-th* tuple corresponds to the - position of the source code that compiled to the *i-th* instruction. + position of the source code that compiled to the *i-th* code unit. Column information is 0-indexed utf-8 byte offsets on the given source line. @@ -1344,13 +1344,13 @@ Special read-only attributes ``object.__getattr__`` with arguments ``obj`` and ``"f_code"``. * - .. attribute:: frame.f_locals - - The dictionary used by the frame to look up + - The mapping used by the frame to look up :ref:`local variables `. - If the frame refers to a function or comprehension, + If the frame refers to an :term:`optimized scope`, this may return a write-through proxy object. .. versionchanged:: 3.13 - Return a proxy for functions and comprehensions. + Return a proxy for optimized scopes. * - .. attribute:: frame.f_globals - The dictionary used by the frame to look up diff --git a/Doc/requirements-oldest-sphinx.txt b/Doc/requirements-oldest-sphinx.txt index fe45c91501a56a..3ae65bc944da26 100644 --- a/Doc/requirements-oldest-sphinx.txt +++ b/Doc/requirements-oldest-sphinx.txt @@ -23,7 +23,7 @@ Jinja2==3.1.4 MarkupSafe==2.1.5 packaging==24.0 Pygments==2.18.0 -requests==2.31.0 +requests==2.32.2 snowballstemmer==2.2.0 Sphinx==6.2.1 sphinxcontrib-applehelp==1.0.8 diff --git a/Doc/requirements.txt b/Doc/requirements.txt index 15675ab45fea71..b47a9d8a8635ab 100644 --- a/Doc/requirements.txt +++ b/Doc/requirements.txt @@ -10,7 +10,6 @@ sphinx~=7.3.0 blurb -sphinx-autobuild sphinxext-opengraph==0.7.5 sphinx-notfound-page==1.0.0 diff --git a/Doc/tools/check-warnings.py b/Doc/tools/check-warnings.py index 809a8d63087e12..c50b00636c36ce 100644 --- a/Doc/tools/check-warnings.py +++ b/Doc/tools/check-warnings.py @@ -13,6 +13,9 @@ from pathlib import Path from typing import TextIO +# Fail if NEWS nit found before this line number +NEWS_NIT_THRESHOLD = 200 + # Exclude these whether they're dirty or clean, # because they trigger a rebuild of dirty files. 
EXCLUDE_FILES = { @@ -245,6 +248,32 @@ def fail_if_improved( return 0 +def fail_if_new_news_nit(warnings: list[str], threshold: int) -> int: + """ + Ensure no warnings are found in the NEWS file before a given line number. + """ + news_nits = ( + warning + for warning in warnings + if "/build/NEWS:" in warning + ) + + # Nits found before the threshold line + new_news_nits = [ + nit + for nit in news_nits + if int(nit.split(":")[1]) <= threshold + ] + + if new_news_nits: + print("\nError: new NEWS nits:\n") + for warning in new_news_nits: + print(warning) + return -1 + + return 0 + + def main(argv: list[str] | None = None) -> int: parser = argparse.ArgumentParser() parser.add_argument( @@ -264,6 +293,14 @@ def main(argv: list[str] | None = None) -> int: action="store_true", help="Fail if new files with no nits are found", ) + parser.add_argument( + "--fail-if-new-news-nit", + metavar="threshold", + type=int, + nargs="?", + const=NEWS_NIT_THRESHOLD, + help="Fail if new NEWS nit found before threshold line number", + ) args = parser.parse_args(argv) if args.annotate_diff is not None and len(args.annotate_diff) > 2: @@ -304,6 +341,9 @@ def main(argv: list[str] | None = None) -> int: if args.fail_if_improved: exit_code += fail_if_improved(files_with_expected_nits, files_with_nits) + if args.fail_if_new_news_nit: + exit_code += fail_if_new_news_nit(warnings, args.fail_if_new_news_nit) + return exit_code diff --git a/Doc/tools/extensions/glossary_search.py b/Doc/tools/extensions/glossary_search.py index 59a6862ea3d3f4..7c93b1e4990603 100644 --- a/Doc/tools/extensions/glossary_search.py +++ b/Doc/tools/extensions/glossary_search.py @@ -20,13 +20,13 @@ def process_glossary_nodes(app, doctree, fromdocname): - if app.builder.format != 'html': + if app.builder.format != 'html' or app.builder.embedded: return terms = {} - for node in doctree.traverse(glossary): - for glossary_item in node.traverse(definition_list_item): + for node in doctree.findall(glossary): + for glossary_item in node.findall(definition_list_item): term = glossary_item[0].astext().lower() definition = glossary_item[1] diff --git a/Doc/tools/extensions/pyspecific.py b/Doc/tools/extensions/pyspecific.py index b35bedcb0901f1..7a2a85346b3531 100644 --- a/Doc/tools/extensions/pyspecific.py +++ b/Doc/tools/extensions/pyspecific.py @@ -604,7 +604,7 @@ def parse_monitoring_event(env, sig, signode): def process_audit_events(app, doctree, fromdocname): - for node in doctree.traverse(audit_event_list): + for node in doctree.findall(audit_event_list): break else: return @@ -663,7 +663,7 @@ def process_audit_events(app, doctree, fromdocname): body += row - for node in doctree.traverse(audit_event_list): + for node in doctree.findall(audit_event_list): node.replace_self(table) diff --git a/Doc/tools/static/rtd_switcher.js b/Doc/tools/static/rtd_switcher.js new file mode 100644 index 00000000000000..a67bb85505a9ca --- /dev/null +++ b/Doc/tools/static/rtd_switcher.js @@ -0,0 +1,88 @@ + function onSwitch(event) { + const option = event.target.selectedIndex; + const item = event.target.options[option]; + window.location.href = item.dataset.url; + } + + document.addEventListener("readthedocs-addons-data-ready", function(event) { + const config = event.detail.data() + + // Add some mocked hardcoded versions pointing to the official + // documentation while migrating to Read the Docs. + // These are only for testing purposes. 
+ // TODO: remove them when managing all the versions on Read the Docs, + // since all the "active, built and not hidden" versions will be shown automatically. + let versions = config.versions.active.concat([ + { + slug: "dev (3.14)", + urls: { + documentation: "https://docs.python.org/3.14/", + } + }, + { + slug: "dev (3.13)", + urls: { + documentation: "https://docs.python.org/3.13/", + } + }, + { + slug: "3.12", + urls: { + documentation: "https://docs.python.org/3.12/", + } + }, + { + slug: "3.11", + urls: { + documentation: "https://docs.python.org/3.11/", + } + }, + ]); + + const versionSelect = ` + + `; + + // Prepend the current language to the options on the selector + let languages = config.projects.translations.concat(config.projects.current); + languages = languages.sort((a, b) => a.language.name.localeCompare(b.language.name)); + + const languageSelect = ` + + `; + + // Query all the placeholders because there are different ones for Desktop/Mobile + const versionPlaceholders = document.querySelectorAll(".version_switcher_placeholder"); + for (placeholder of versionPlaceholders) { + placeholder.innerHTML = versionSelect; + let selectElement = placeholder.querySelector("select"); + selectElement.addEventListener("change", onSwitch); + } + + const languagePlaceholders = document.querySelectorAll(".language_switcher_placeholder"); + for (placeholder of languagePlaceholders) { + placeholder.innerHTML = languageSelect; + let selectElement = placeholder.querySelector("select"); + selectElement.addEventListener("change", onSwitch); + } + }); diff --git a/Doc/tools/templates/layout.html b/Doc/tools/templates/layout.html index 3c12b01b558f83..3f88fc8e91faad 100644 --- a/Doc/tools/templates/layout.html +++ b/Doc/tools/templates/layout.html @@ -42,89 +42,8 @@ {{ super() }} - - +{%- if not embedded %} + + +{%- endif %} {% endblock %} diff --git a/Doc/tutorial/classes.rst b/Doc/tutorial/classes.rst index 7ab528acb370f2..1b64741c349ee9 100644 --- a/Doc/tutorial/classes.rst +++ b/Doc/tutorial/classes.rst @@ -338,11 +338,7 @@ code will print the value ``16``, without leaving a trace:: del x.counter The other kind of instance attribute reference is a *method*. A method is a -function that "belongs to" an object. (In Python, the term method is not unique -to class instances: other object types can have methods as well. For example, -list objects have methods called append, insert, remove, sort, and so on. -However, in the following discussion, we'll use the term method exclusively to -mean methods of class instance objects, unless explicitly stated otherwise.) +function that "belongs to" an object. .. index:: pair: object; method diff --git a/Doc/tutorial/datastructures.rst b/Doc/tutorial/datastructures.rst index de2827461e2f24..a1492298bdb867 100644 --- a/Doc/tutorial/datastructures.rst +++ b/Doc/tutorial/datastructures.rst @@ -126,7 +126,7 @@ Python. Another thing you might notice is that not all data can be sorted or compared. For instance, ``[None, 'hello', 10]`` doesn't sort because -integers can't be compared to strings and *None* can't be compared to +integers can't be compared to strings and ``None`` can't be compared to other types. Also, there are some types that don't have a defined ordering relation. For example, ``3+4j < 5+7j`` isn't a valid comparison. 
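To make the tutorial note just above concrete (the exact exception messages may differ slightly between versions)::

   >>> sorted([None, 'hello', 10])
   Traceback (most recent call last):
     ...
   TypeError: '<' not supported between instances of 'str' and 'NoneType'
   >>> 3+4j < 5+7j
   Traceback (most recent call last):
     ...
   TypeError: '<' not supported between instances of 'complex' and 'complex'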
diff --git a/Doc/tutorial/inputoutput.rst b/Doc/tutorial/inputoutput.rst index fe9ca9ccb9c7e0..857068a51ab843 100644 --- a/Doc/tutorial/inputoutput.rst +++ b/Doc/tutorial/inputoutput.rst @@ -37,16 +37,23 @@ printing space-separated values. There are several ways to format output. * The :meth:`str.format` method of strings requires more manual effort. You'll still use ``{`` and ``}`` to mark where a variable will be substituted and can provide detailed formatting directives, - but you'll also need to provide the information to be formatted. + but you'll also need to provide the information to be formatted. In the following code + block there are two examples of how to format variables: + :: >>> yes_votes = 42_572_654 - >>> no_votes = 43_132_495 - >>> percentage = yes_votes / (yes_votes + no_votes) + >>> total_votes = 85_705_149 + >>> percentage = yes_votes / total_votes >>> '{:-9} YES votes {:2.2%}'.format(yes_votes, percentage) ' 42572654 YES votes 49.67%' + Notice how the ``yes_votes`` are padded with spaces and a negative sign only for negative numbers. + The example also prints ``percentage`` multiplied by 100, with 2 decimal + places and followed by a percent sign (see :ref:`formatspec` for details). + + * Finally, you can do all the string handling yourself by using string slicing and concatenation operations to create any layout you can imagine. The string type has some methods that perform useful operations for padding @@ -197,7 +204,12 @@ notation. :: Jack: 4098; Sjoerd: 4127; Dcab: 8637678 This is particularly useful in combination with the built-in function -:func:`vars`, which returns a dictionary containing all local variables. +:func:`vars`, which returns a dictionary containing all local variables:: + + >>> table = {k: str(v) for k, v in vars().items()} + >>> message = " ".join([f'{k}: ' + '{' + k +'};' for k in table.keys()]) + >>> print(message.format(**table)) + __name__: __main__; __doc__: None; __package__: None; __loader__: ... As an example, the following lines produce a tidily aligned set of columns giving integers and their squares and cubes:: diff --git a/Doc/tutorial/venv.rst b/Doc/tutorial/venv.rst index a6dead2eac11f6..91e4ce18acef1d 100644 --- a/Doc/tutorial/venv.rst +++ b/Doc/tutorial/venv.rst @@ -36,10 +36,10 @@ Creating Virtual Environments ============================= The module used to create and manage virtual environments is called -:mod:`venv`. :mod:`venv` will usually install the most recent version of -Python that you have available. If you have multiple versions of Python on your -system, you can select a specific Python version by running ``python3`` or -whichever version you want. +:mod:`venv`. :mod:`venv` will install the Python version from which +the command was run (as reported by the :option:`--version` option). +For instance, executing the command with ``python3.12`` will install +version 3.12. To create a virtual environment, decide upon a directory where you want to place it, and run the :mod:`venv` module as a script with the directory path:: diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst index 2d95fa9474033f..0620f7d7114ba5 100644 --- a/Doc/using/cmdline.rst +++ b/Doc/using/cmdline.rst @@ -616,7 +616,8 @@ Miscellaneous options * :samp:`-X gil={0,1}` forces the GIL to be disabled or enabled, respectively. Only available in builds configured with - :option:`--disable-gil`. See also :envvar:`PYTHON_GIL`. + :option:`--disable-gil`. See also :envvar:`PYTHON_GIL` and + :ref:`free-threaded-cpython`. .. 
versionadded:: 3.13 @@ -1206,7 +1207,7 @@ conflict. forced on. Setting it to ``0`` forces the GIL off. See also the :option:`-X gil <-X>` command-line option, which takes - precedence over this variable. + precedence over this variable, and :ref:`free-threaded-cpython`. Needs Python configured with the :option:`--disable-gil` build option. diff --git a/Doc/using/configure.rst b/Doc/using/configure.rst index d30356d2058eff..428ee5275276a0 100644 --- a/Doc/using/configure.rst +++ b/Doc/using/configure.rst @@ -299,7 +299,7 @@ General Options Defines the ``Py_GIL_DISABLED`` macro and adds ``"t"`` to :data:`sys.abiflags`. - See :pep:`703` "Making the Global Interpreter Lock Optional in CPython". + See :ref:`free-threaded-cpython` for more detail. .. versionadded:: 3.13 diff --git a/Doc/using/ios.rst b/Doc/using/ios.rst index da8f42048c0faf..71fc29c450c8eb 100644 --- a/Doc/using/ios.rst +++ b/Doc/using/ios.rst @@ -303,8 +303,8 @@ modules in your app, some additional steps will be required: * You need to ensure that any folders containing third-party binaries are either associated with the app target, or copied in as part of step 8. Step 8 should also purge any binaries that are not appropriate for the platform a - specific build is targetting (i.e., delete any device binaries if you're - building app app targeting the simulator). + specific build is targeting (i.e., delete any device binaries if you're + building an app targeting the simulator). * Any folders that contain third-party binaries must be processed into framework form by step 9. The invocation of ``install_dylib`` that processes diff --git a/Doc/whatsnew/2.2.rst b/Doc/whatsnew/2.2.rst index e6c13f957b8d54..d4dbe0570fbda5 100644 --- a/Doc/whatsnew/2.2.rst +++ b/Doc/whatsnew/2.2.rst @@ -1062,7 +1062,7 @@ code, none of the changes described here will affect you very much. simply been changed to use the new C-level interface. (Contributed by Fred L. Drake, Jr.) -* Another low-level API, primarily of interest to implementors of Python +* Another low-level API, primarily of interest to implementers of Python debuggers and development tools, was added. :c:func:`PyInterpreterState_Head` and :c:func:`PyInterpreterState_Next` let a caller walk through all the existing interpreter objects; :c:func:`PyInterpreterState_ThreadHead` and diff --git a/Doc/whatsnew/2.7.rst b/Doc/whatsnew/2.7.rst index 031777b9cf6413..c45f0887b41f4f 100644 --- a/Doc/whatsnew/2.7.rst +++ b/Doc/whatsnew/2.7.rst @@ -1738,7 +1738,7 @@ New module: importlib Python 3.1 includes the :mod:`importlib` package, a re-implementation of the logic underlying Python's :keyword:`import` statement. -:mod:`importlib` is useful for implementors of Python interpreters and +:mod:`importlib` is useful for implementers of Python interpreters and to users who wish to write new importers that can participate in the import process. Python 2.7 doesn't contain the complete :mod:`importlib` package, but instead has a tiny subset that contains diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst index 8757311a484257..d9a314ecafa4a2 100644 --- a/Doc/whatsnew/3.12.rst +++ b/Doc/whatsnew/3.12.rst @@ -1251,7 +1251,7 @@ Deprecated :exc:`DeprecationWarning` when it can detect being called from a multithreaded process. There has always been a fundamental incompatibility with the POSIX platform when doing so. Even if such code *appeared* to work. 
- We added the warning to to raise awareness as issues encounted by code doing + We added the warning to raise awareness as issues encountered by code doing this are becoming more frequent. See the :func:`os.fork` documentation for more details along with `this discussion on fork being incompatible with threads `_ for *why* we're now surfacing this @@ -1739,7 +1739,7 @@ unittest * Undocumented :meth:`TestLoader.loadTestsFromModule ` parameter *use_load_tests* - (deprecated and ignored since Python 3.2). + (deprecated and ignored since Python 3.5). * An alias of the :class:`~unittest.TextTestResult` class: ``_TextTestResult`` (deprecated in Python 3.2). diff --git a/Doc/whatsnew/3.13.rst b/Doc/whatsnew/3.13.rst index 69264a3672bbbf..b4b7cb9be52784 100644 --- a/Doc/whatsnew/3.13.rst +++ b/Doc/whatsnew/3.13.rst @@ -94,16 +94,20 @@ Interpreter improvements: Performance improvements are modest -- we expect to be improving this over the next few releases. -* :pep:`667`: :attr:`FrameType.f_locals ` when used in - a function now returns a write-through proxy to the frame's locals, - rather than a ``dict``. See the PEP for corresponding C API changes - and deprecations. +* :pep:`667`: The :func:`locals` builtin now has + :ref:`defined semantics ` when mutating the + returned mapping. Python debuggers and similar tools may now more reliably + update local variables in optimized scopes even during concurrent code + execution. New typing features: * :pep:`696`: Type parameters (:data:`typing.TypeVar`, :data:`typing.ParamSpec`, and :data:`typing.TypeVarTuple`) now support defaults. +* :pep:`702`: Support for marking deprecations in the type system using the + new :func:`warnings.deprecated` decorator. + * :pep:`742`: :data:`typing.TypeIs` was added, providing more intuitive type narrowing behavior. @@ -150,10 +154,10 @@ New Features A Better Interactive Interpreter -------------------------------- -On Unix-like systems like Linux or macOS, Python now uses a new -:term:`interactive` shell. When the user starts the :term:`REPL` from an -interactive terminal, and both :mod:`curses` and :mod:`readline` are -available, the interactive shell now supports the following new features: +On Unix-like systems like Linux or macOS as well as Windows, Python now +uses a new :term:`interactive` shell. When the user starts the +:term:`REPL` from an interactive terminal the interactive shell now +supports the following new features: * Colorized prompts. * Multiline editing with history preservation. @@ -170,10 +174,13 @@ available, the interactive shell now supports the following new features: If the new interactive shell is not desired, it can be disabled via the :envvar:`PYTHON_BASIC_REPL` environment variable. +The new shell requires :mod:`curses` on Unix-like systems. + For more on interactive mode, see :ref:`tut-interac`. (Contributed by Pablo Galindo Salgado, Łukasz Langa, and -Lysandros Nikolaou in :gh:`111201` based on code from the PyPy project.) +Lysandros Nikolaou in :gh:`111201` based on code from the PyPy project. +Windows support contributed by Dino Viehland and Anthony Shaw.) .. _whatsnew313-improved-error-messages: @@ -244,6 +251,50 @@ Improved Error Messages through ``self.X`` from any function in its body. (Contributed by Irit Katriel in :gh:`115775`.) +.. 
_whatsnew313-locals-semantics: + +Defined mutation semantics for ``locals()`` +------------------------------------------- + +Historically, the expected result of mutating the return value of :func:`locals` +has been left to individual Python implementations to define. + +Through :pep:`667`, Python 3.13 standardises the historical behaviour of CPython +for most code execution scopes, but changes +:term:`optimized scopes ` (functions, generators, coroutines, +comprehensions, and generator expressions) to explicitly return independent +snapshots of the currently assigned local variables, including locally +referenced nonlocal variables captured in closures. + +This change to the semantics of :func:`locals` in optimized scopes also affects the default +behaviour of code execution functions that implicitly target ``locals()`` if no explicit +namespace is provided (such as :func:`exec` and :func:`eval`). In previous versions, whether +or not changes could be accessed by calling ``locals()`` after calling the code execution +function was implementation dependent. In CPython specifically, such code would typically +appear to work as desired, but could sometimes fail in optimized scopes based on other code +(including debuggers and code execution tracing tools) potentially resetting the shared +snapshot in that scope. Now, the code will always run against an independent snapshot of the +local variables in optimized scopes, and hence the changes will never be visible in +subsequent calls to ``locals()``. To access the changes made in these cases, an explicit +namespace reference must now be passed to the relevant function. Alternatively, it may make +sense to update affected code to use a higher level code execution API that returns the +resulting code execution namespace (e.g. :func:`runpy.run_path` when executing Python +files from disk). + +To ensure debuggers and similar tools can reliably update local variables in +scopes affected by this change, :attr:`FrameType.f_locals ` now +returns a write-through proxy to the frame's local and locally referenced +nonlocal variables in these scopes, rather than returning an inconsistently +updated shared ``dict`` instance with undefined runtime semantics. + +See :pep:`667` for more details, including related C API changes and deprecations. Porting +notes are also provided below for the affected :ref:`Python APIs ` +and :ref:`C APIs `. + +(PEP and implementation contributed by Mark Shannon and Tian Gao in +:gh:`74929`. Documentation updates provided by Guido van Rossum and +Alyssa Coghlan.) + Incremental Garbage Collection ------------------------------ @@ -336,7 +387,8 @@ CPython will run with the :term:`global interpreter lock` (GIL) disabled when configured using the ``--disable-gil`` option at build time. This is an experimental feature and therefore isn't used by default. Users need to either compile their own interpreter, or install one of the experimental -builds that are marked as *free-threaded*. +builds that are marked as *free-threaded*. See :pep:`703` "Making the Global +Interpreter Lock Optional in CPython" for more detail. Free-threaded execution allows for full utilization of the available processing power by running threads in parallel on available CPU cores. @@ -521,8 +573,10 @@ ast If an optional field on an AST node is not included as an argument when constructing an instance, the field will now be set to ``None``. Similarly, - if a list field is omitted, that field will now be set to an empty list. 
- (Previously, in both cases, the attribute would be missing on the newly + if a list field is omitted, that field will now be set to an empty list, + and if a :class:`!ast.expr_context` field is omitted, it defaults to + :class:`Load() `. + (Previously, in all cases, the attribute would be missing on the newly constructed AST node instance.) If other arguments are omitted, a :exc:`DeprecationWarning` is emitted. @@ -534,7 +588,7 @@ ast unless the class opts in to the new behavior by setting the attribute :attr:`ast.AST._field_types`. - (Contributed by Jelle Zijlstra in :gh:`105858` and :gh:`117486`.) + (Contributed by Jelle Zijlstra in :gh:`105858`, :gh:`117486`, and :gh:`118851`.) * :func:`ast.parse` now accepts an optional argument *optimize* which is passed on to the :func:`compile` built-in. This makes it @@ -640,6 +694,13 @@ dis the ``show_offsets`` parameter. (Contributed by Irit Katriel in :gh:`112137`.) +* :meth:`~dis.get_instructions` no longer represents cache entries as + separate instructions. Instead, it returns them as part of the + :class:`~dis.Instruction`, in the new *cache_info* field. The + *show_caches* argument to :meth:`~dis.get_instructions` is + deprecated and no longer has any effect. + (Contributed by Irit Katriel in :gh:`112962`.) + .. _whatsnew313-doctest: doctest @@ -794,6 +855,9 @@ mmap * :class:`mmap.mmap` now has a *trackfd* parameter on Unix; if it is ``False``, the file descriptor specified by *fileno* will not be duplicated. (Contributed by Zackery Spytz and Petr Viktorin in :gh:`78502`.) +* :class:`mmap.mmap` is now protected from crashing on Windows when the mapped memory + is inaccessible due to file system errors or access violations. + (Contributed by Jannis Weigend in :gh:`118209`.) opcode ------ @@ -847,6 +911,12 @@ os :c:func:`!posix_spawn_file_actions_addclosefrom_np`. (Contributed by Jakub Kulik in :gh:`113117`.) +* :func:`os.mkdir` and :func:`os.makedirs` on Windows now support passing a + *mode* value of ``0o700`` to apply access control to the new directory. This + implicitly affects :func:`tempfile.mkdtemp` and is a mitigation for + :cve:`2024-4030`. Other values for *mode* continue to be ignored. + (Contributed by Steve Dower in :gh:`118486`.) + os.path ------- @@ -931,7 +1001,7 @@ random ------ * Add a :ref:`command-line interface `. - (Contributed by Hugo van Kemenade in :gh:`54321`.) + (Contributed by Hugo van Kemenade in :gh:`118131`.) re -- @@ -989,6 +1059,14 @@ sys This function is not guaranteed to exist in all implementations of Python. (Contributed by Serhiy Storchaka in :gh:`78573`.) +tempfile +-------- + +* On Windows, the default mode ``0o700`` used by :func:`tempfile.mkdtemp` now + limits access to the new directory due to changes to :func:`os.mkdir`. This + is a mitigation for :cve:`2024-4030`. + (Contributed by Steve Dower in :gh:`118486`.) + time ---- @@ -1251,7 +1329,7 @@ PEP 594: dead batteries (and other module removals) * :mod:`!sunau`. (Contributed by Victor Stinner in :gh:`104773`.) - * :mod:`!telnetlib`, use the projects :pypi:`telnetlib3 ` or + * :mod:`!telnetlib`, use the projects :pypi:`telnetlib3` or :pypi:`Exscript` instead. (Contributed by Victor Stinner in :gh:`104773`.) @@ -1612,9 +1690,6 @@ Pending Removal in Python 3.14 * ``master_open()``: use :func:`pty.openpty`. * ``slave_open()``: use :func:`pty.openpty`. -* :func:`shutil.rmtree` *onerror* parameter is deprecated in 3.12, - and will be removed in 3.14: use the *onexc* parameter instead. 
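For the relocated ``shutil.rmtree`` deprecation note: a short sketch of moving from *onerror* to *onexc* (the path and handler name are illustrative only)::

   import shutil

   def log_failure(func, path, exc):
       # onexc receives the exception instance itself, whereas the
       # deprecated onerror callback received an exc_info() triple.
       print(f"{func.__name__} failed on {path}: {exc}")

   shutil.rmtree("build/tmp-scratch", onexc=log_failure)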
- * :mod:`sqlite3`: * :data:`~sqlite3.version` and :data:`~sqlite3.version_info`. @@ -1790,6 +1865,9 @@ although there is currently no date scheduled for their removal. * :mod:`!sre_compile`, :mod:`!sre_constants` and :mod:`!sre_parse` modules. +* :mod:`shutil`: :func:`~shutil.rmtree`'s *onerror* parameter is deprecated in + Python 3.12; use the *onexc* parameter instead. + * :mod:`ssl` options and protocols: * :class:`ssl.SSLContext` without protocol argument is deprecated. @@ -2076,6 +2154,11 @@ New Features destruction the same way the :mod:`tracemalloc` module does. (Contributed by Pablo Galindo in :gh:`93502`.) +* Add :c:func:`PyEval_GetFrameBuiltins`, :c:func:`PyEval_GetFrameGlobals`, and + :c:func:`PyEval_GetFrameLocals` to the C API. These replacements for + :c:func:`PyEval_GetBuiltins`, :c:func:`PyEval_GetGlobals`, and + :c:func:`PyEval_GetLocals` return :term:`strong references ` + rather than borrowed references. (Added as part of :pep:`667`.) Build Changes ============= @@ -2117,6 +2200,10 @@ Build Changes The bundled mimalloc has custom changes, see :gh:`113141` for details. (Contributed by Dino Viehland in :gh:`109914`.) +* On POSIX systems, the pkg-config (``.pc``) filenames now include the ABI + flags. For example, the free-threaded build generates ``python-3.13t.pc`` + and the debug build generates ``python-3.13d.pc``. + Porting to Python 3.13 ====================== @@ -2155,16 +2242,29 @@ Changes in the Python API returned by :meth:`zipfile.ZipFile.open` was changed from ``'r'`` to ``'rb'``. (Contributed by Serhiy Storchaka in :gh:`115961`.) -* Callbacks registered in the :mod:`tkinter` module now take arguments as - various Python objects (``int``, ``float``, ``bytes``, ``tuple``), - not just ``str``. - To restore the previous behavior set :mod:`!tkinter` module global - :data:`!wantobject` to ``1`` before creating the - :class:`!Tk` object or call the :meth:`!wantobject` - method of the :class:`!Tk` object with argument ``1``. - Calling it with argument ``2`` restores the current default behavior. - (Contributed by Serhiy Storchaka in :gh:`66410`.) - +.. _pep667-porting-notes-py: + +* Calling :func:`locals` in an :term:`optimized scope` now produces an + independent snapshot on each call, and hence no longer implicitly updates + previously returned references. Obtaining the legacy CPython behaviour now + requires explicit calls to update the initially returned dictionary with the + results of subsequent calls to ``locals()``. Code execution functions that + implicitly target ``locals()`` (such as ``exec`` and ``eval``) must be + passed an explicit namespace to access their results in an optimized scope. + (Changed as part of :pep:`667`.) + +* Calling :func:`locals` from a comprehension at module or class scope + (including via ``exec`` or ``eval``) once more behaves as if the comprehension + were running as an independent nested function (i.e. the local variables from + the containing scope are not included). In Python 3.12, this had changed + to include the local variables from the containing scope when implementing + :pep:`709`. (Changed as part of :pep:`667`.) + +* Accessing :attr:`FrameType.f_locals ` in an + :term:`optimized scope` now returns a write-through proxy rather than a + snapshot that gets updated at ill-specified times. If a snapshot is desired, + it must be created explicitly with ``dict`` or the proxy's ``.copy()`` method. + (Changed as part of :pep:`667`.) 
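A small sketch of the Python-level porting notes above, illustrating the 3.13 behaviour (on 3.12 and earlier the assertions may not hold)::

   import sys

   def demo():
       x = 1
       snapshot = locals()
       snapshot["y"] = 2              # mutates only this snapshot
       assert "y" not in locals()     # each locals() call is now independent

       # exec() needs an explicit namespace for its results to be visible:
       namespace = {"x": x}
       exec("z = x * 10", namespace)

       # f_locals is now a write-through proxy; copy it when a plain
       # dict snapshot is needed.
       plain = dict(sys._getframe().f_locals)
       assert "y" not in plain

       return namespace["z"]

   print(demo())   # 10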
Changes in the C API -------------------- @@ -2239,6 +2339,33 @@ Changes in the C API to :c:func:`PyUnstable_Code_GetFirstFree`. (Contributed by Bogdan Romanyuk in :gh:`115781`.) +.. _pep667-porting-notes-c: + +* The effects of mutating the dictionary returned from :c:func:`PyEval_GetLocals` in an + :term:`optimized scope` have changed. New dict entries added this way will now *only* be + visible to subsequent :c:func:`PyEval_GetLocals` calls in that frame, as + :c:func:`PyFrame_GetLocals`, :func:`locals`, and + :attr:`FrameType.f_locals ` no longer access the same underlying cached + dictionary. Changes made to entries for actual variable names and names added via the + write-through proxy interfaces will be overwritten on subsequent calls to + :c:func:`PyEval_GetLocals` in that frame. The recommended code update depends on how the + function was being used, so refer to the deprecation notice on the function for details. + (Changed as part of :pep:`667`.) + +* Calling :c:func:`PyFrame_GetLocals` in an :term:`optimized scope` now returns a + write-through proxy rather than a snapshot that gets updated at ill-specified times. + If a snapshot is desired, it must be created explicitly (e.g. with :c:func:`PyDict_Copy`) + or by calling the new :c:func:`PyEval_GetFrameLocals` API. (Changed as part of :pep:`667`.) + +* :c:func:`!PyFrame_FastToLocals` and :c:func:`!PyFrame_FastToLocalsWithError` + no longer have any effect. Calling these functions has been redundant since + Python 3.11, when :c:func:`PyFrame_GetLocals` was first introduced. + (Changed as part of :pep:`667`.) + +* :c:func:`!PyFrame_LocalsToFast` no longer has any effect. Calling this function + is redundant now that :c:func:`PyFrame_GetLocals` returns a write-through proxy + for :term:`optimized scopes `. (Changed as part of :pep:`667`.) + Removed C APIs -------------- @@ -2392,6 +2519,11 @@ Deprecated C APIs :c:func:`PyWeakref_GetRef` on Python 3.12 and older. (Contributed by Victor Stinner in :gh:`105927`.) +* Deprecate the :c:func:`PyEval_GetBuiltins`, :c:func:`PyEval_GetGlobals`, and + :c:func:`PyEval_GetLocals` functions, which return a :term:`borrowed reference`. + Refer to the deprecation notices on each function for their recommended replacements. + (Soft deprecated as part of :pep:`667`.) + Pending Removal in Python 3.14 ------------------------------ diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst index 3dd400c3771ed2..8aef0f5ac26728 100644 --- a/Doc/whatsnew/3.4.rst +++ b/Doc/whatsnew/3.4.rst @@ -2413,7 +2413,7 @@ Changes in the Python API formal public interface the naming has been made consistent (:issue:`18532`). * Because :mod:`unittest.TestSuite` now drops references to tests after they - are run, test harnesses that re-use a :class:`~unittest.TestSuite` to re-run + are run, test harnesses that reuse a :class:`~unittest.TestSuite` to re-run a set of tests may fail. Test suites should not be re-used in this fashion since it means state is retained between test runs, breaking the test isolation that :mod:`unittest` is designed to provide. However, if the lack diff --git a/Doc/whatsnew/3.6.rst b/Doc/whatsnew/3.6.rst index d62beb0bdc8672..68ab43462b743a 100644 --- a/Doc/whatsnew/3.6.rst +++ b/Doc/whatsnew/3.6.rst @@ -2336,10 +2336,10 @@ Changes in the Python API * With the introduction of :exc:`ModuleNotFoundError`, import system consumers may start expecting import system replacements to raise that more specific exception when appropriate, rather than the less-specific :exc:`ImportError`. 
- To provide future compatibility with such consumers, implementors of + To provide future compatibility with such consumers, implementers of alternative import systems that completely replace :func:`__import__` will need to update their implementations to raise the new subclass when a module - can't be found at all. Implementors of compliant plugins to the default + can't be found at all. Implementers of compliant plugins to the default import system shouldn't need to make any changes, as the default import system will raise the new subclass when appropriate. diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index e29d37ca120b76..90bdcf9541613c 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -891,7 +891,7 @@ Deprecated * Deprecated the ``split()`` method of :class:`!_tkinter.TkappType` in favour of the ``splitlist()`` method which has more consistent and - predicable behavior. + predictable behavior. (Contributed by Serhiy Storchaka in :issue:`38371`.) * The explicit passing of coroutine objects to :func:`asyncio.wait` has been diff --git a/Grammar/python.gram b/Grammar/python.gram index c04bc641779c04..5a181dc1578b2c 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -127,7 +127,6 @@ simple_stmt[stmt_ty] (memo): | &'nonlocal' nonlocal_stmt compound_stmt[stmt_ty]: - | invalid_compound_stmt | &('def' | '@' | 'async') function_def | &'if' if_stmt | &('class' | '@') class_def @@ -1317,10 +1316,6 @@ invalid_import_from_targets: | token=NEWLINE { RAISE_SYNTAX_ERROR_STARTING_FROM(token, "Expected one or more names after 'import'") } -invalid_compound_stmt: - | a='elif' named_expression ':' { RAISE_SYNTAX_ERROR_STARTING_FROM(a, "'elif' must match an if-statement here") } - | a='else' ':' { RAISE_SYNTAX_ERROR_STARTING_FROM(a, "'else' must match a valid statement here") } - invalid_with_stmt: | ['async'] 'with' ','.(expression ['as' star_target])+ NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") } | ['async'] 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") } diff --git a/Include/cpython/longobject.h b/Include/cpython/longobject.h index 189229ee1035d8..96815938c8277a 100644 --- a/Include/cpython/longobject.h +++ b/Include/cpython/longobject.h @@ -60,6 +60,15 @@ PyAPI_FUNC(Py_ssize_t) PyUnstable_Long_CompactValue(const PyLongObject* op); // There are no error cases. PyAPI_FUNC(int) _PyLong_Sign(PyObject *v); +/* _PyLong_NumBits. Return the number of bits needed to represent the + absolute value of a long. For example, this returns 1 for 1 and -1, 2 + for 2 and -2, and 2 for 3 and -3. It returns 0 for 0. + v must not be NULL, and must be a normalized long. + (size_t)-1 is returned and OverflowError set if the true result doesn't + fit in a size_t. +*/ +PyAPI_FUNC(size_t) _PyLong_NumBits(PyObject *v); + /* _PyLong_FromByteArray: View the n unsigned bytes as a binary integer in base 256, and return a Python int with the same numeric value. If n is 0, the integer is 0. 
Else: diff --git a/Include/cpython/monitoring.h b/Include/cpython/monitoring.h index efb9ec0e587552..797ba51246b1c6 100644 --- a/Include/cpython/monitoring.h +++ b/Include/cpython/monitoring.h @@ -101,7 +101,7 @@ PyAPI_FUNC(int) _PyMonitoring_FirePyUnwindEvent(PyMonitoringState *state, PyObject *codelike, int32_t offset); PyAPI_FUNC(int) -_PyMonitoring_FireStopIterationEvent(PyMonitoringState *state, PyObject *codelike, int32_t offset); +_PyMonitoring_FireStopIterationEvent(PyMonitoringState *state, PyObject *codelike, int32_t offset, PyObject *value); #define _PYMONITORING_IF_ACTIVE(STATE, X) \ @@ -240,11 +240,11 @@ PyMonitoring_FirePyUnwindEvent(PyMonitoringState *state, PyObject *codelike, int } static inline int -PyMonitoring_FireStopIterationEvent(PyMonitoringState *state, PyObject *codelike, int32_t offset) +PyMonitoring_FireStopIterationEvent(PyMonitoringState *state, PyObject *codelike, int32_t offset, PyObject *value) { _PYMONITORING_IF_ACTIVE( state, - _PyMonitoring_FireStopIterationEvent(state, codelike, offset)); + _PyMonitoring_FireStopIterationEvent(state, codelike, offset, value)); } #undef _PYMONITORING_IF_ACTIVE diff --git a/Include/cpython/pyhash.h b/Include/cpython/pyhash.h index 2f8e12c1423aa1..825c034a8d8474 100644 --- a/Include/cpython/pyhash.h +++ b/Include/cpython/pyhash.h @@ -3,7 +3,7 @@ #endif /* Prime multiplier used in string and various other hashes. */ -#define _PyHASH_MULTIPLIER 1000003UL /* 0xf4243 */ +#define PyHASH_MULTIPLIER 1000003UL /* 0xf4243 */ /* Parameters used for the numeric hash implementation. See notes for _Py_HashDouble in Python/pyhash.c. Numeric hashes are based on @@ -17,9 +17,10 @@ #define PyHASH_MODULUS (((size_t)1 << _PyHASH_BITS) - 1) #define PyHASH_INF 314159 -#define PyHASH_IMAG _PyHASH_MULTIPLIER +#define PyHASH_IMAG PyHASH_MULTIPLIER /* Aliases kept for backward compatibility with Python 3.12 */ +#define _PyHASH_MULTIPLIER PyHASH_MULTIPLIER #define _PyHASH_BITS PyHASH_BITS #define _PyHASH_MODULUS PyHASH_MODULUS #define _PyHASH_INF PyHASH_INF diff --git a/Include/cpython/pystate.h b/Include/cpython/pystate.h index 2df9ecd6d52084..ed3ee090ae53db 100644 --- a/Include/cpython/pystate.h +++ b/Include/cpython/pystate.h @@ -83,6 +83,8 @@ struct _ts { unsigned int bound_gilstate:1; /* Currently in use (maybe holds the GIL). */ unsigned int active:1; + /* Currently holds the GIL. */ + unsigned int holds_gil:1; /* various stages of finalization */ unsigned int finalizing:1; @@ -90,7 +92,7 @@ struct _ts { unsigned int finalized:1; /* padding to align to 4 bytes */ - unsigned int :24; + unsigned int :23; } _status; #ifdef Py_BUILD_CORE # define _PyThreadState_WHENCE_NOTSET -1 diff --git a/Include/cpython/weakrefobject.h b/Include/cpython/weakrefobject.h index 9a796098c6b48f..dcca166d7357cc 100644 --- a/Include/cpython/weakrefobject.h +++ b/Include/cpython/weakrefobject.h @@ -40,6 +40,8 @@ struct _PyWeakReference { #endif }; +PyAPI_FUNC(void) _PyWeakref_ClearRef(PyWeakReference *self); + Py_DEPRECATED(3.13) static inline PyObject* PyWeakref_GET_OBJECT(PyObject *ref_obj) { PyWeakReference *ref; diff --git a/Include/internal/mimalloc/mimalloc/internal.h b/Include/internal/mimalloc/mimalloc/internal.h index 8af841cfdffc01..94f88fb603af25 100644 --- a/Include/internal/mimalloc/mimalloc/internal.h +++ b/Include/internal/mimalloc/mimalloc/internal.h @@ -14,8 +14,8 @@ terms of the MIT license. A copy of the license can be found in the file // functions and macros. 
// -------------------------------------------------------------------------- -#include "mimalloc/types.h" -#include "mimalloc/track.h" +#include "types.h" +#include "track.h" #if (MI_DEBUG>0) #define mi_trace_message(...) _mi_trace_message(__VA_ARGS__) diff --git a/Include/internal/mimalloc/mimalloc/types.h b/Include/internal/mimalloc/mimalloc/types.h index 17e440848ecae5..354839ba955b36 100644 --- a/Include/internal/mimalloc/mimalloc/types.h +++ b/Include/internal/mimalloc/mimalloc/types.h @@ -21,7 +21,7 @@ terms of the MIT license. A copy of the license can be found in the file #include // ptrdiff_t #include // uintptr_t, uint16_t, etc -#include "mimalloc/atomic.h" // _Atomic +#include "atomic.h" // _Atomic #ifdef _MSC_VER #pragma warning(disable:4214) // bitfield is not int diff --git a/Include/internal/pycore_ceval.h b/Include/internal/pycore_ceval.h index 48ad0678995904..26ede31b1904b4 100644 --- a/Include/internal/pycore_ceval.h +++ b/Include/internal/pycore_ceval.h @@ -131,11 +131,10 @@ extern int _PyEval_ThreadsInitialized(void); extern void _PyEval_InitGIL(PyThreadState *tstate, int own_gil); extern void _PyEval_FiniGIL(PyInterpreterState *interp); -// Acquire the GIL and return 1. In free-threaded builds, this function may -// return 0 to indicate that the GIL was disabled and therefore not acquired. -extern int _PyEval_AcquireLock(PyThreadState *tstate); +extern void _PyEval_AcquireLock(PyThreadState *tstate); -extern void _PyEval_ReleaseLock(PyInterpreterState *, PyThreadState *); +extern void _PyEval_ReleaseLock(PyInterpreterState *, PyThreadState *, + int final_release); #ifdef Py_GIL_DISABLED // Returns 0 or 1 if the GIL for the given thread's interpreter is disabled or @@ -146,7 +145,8 @@ extern void _PyEval_ReleaseLock(PyInterpreterState *, PyThreadState *); static inline int _PyEval_IsGILEnabled(PyThreadState *tstate) { - return tstate->interp->ceval.gil->enabled != 0; + struct _gil_runtime_state *gil = tstate->interp->ceval.gil; + return _Py_atomic_load_int_relaxed(&gil->enabled) != 0; } // Enable or disable the GIL used by the interpreter that owns tstate, which diff --git a/Include/internal/pycore_critical_section.h b/Include/internal/pycore_critical_section.h index 23b85c2f9e9bb2..7bebcdb67c7169 100644 --- a/Include/internal/pycore_critical_section.h +++ b/Include/internal/pycore_critical_section.h @@ -99,15 +99,6 @@ extern "C" { _PyCriticalSection_End(&_cs); \ } -# define Py_XBEGIN_CRITICAL_SECTION(op) \ - { \ - _PyCriticalSection _cs_opt = {0}; \ - _PyCriticalSection_XBegin(&_cs_opt, _PyObject_CAST(op)) - -# define Py_XEND_CRITICAL_SECTION() \ - _PyCriticalSection_XEnd(&_cs_opt); \ - } - # define Py_BEGIN_CRITICAL_SECTION2(a, b) \ { \ _PyCriticalSection2 _cs2; \ @@ -117,6 +108,26 @@ extern "C" { _PyCriticalSection2_End(&_cs2); \ } +// Specialized version of critical section locking to safely use +// PySequence_Fast APIs without the GIL. For performance, the argument *to* +// PySequence_Fast() is provided to the macro, not the *result* of +// PySequence_Fast(), which would require an extra test to determine if the +// lock must be acquired. 
+# define Py_BEGIN_CRITICAL_SECTION_SEQUENCE_FAST(original) \ + { \ + PyObject *_orig_seq = _PyObject_CAST(original); \ + const bool _should_lock_cs = PyList_CheckExact(_orig_seq); \ + _PyCriticalSection _cs; \ + if (_should_lock_cs) { \ + _PyCriticalSection_Begin(&_cs, &_orig_seq->ob_mutex); \ + } + +# define Py_END_CRITICAL_SECTION_SEQUENCE_FAST() \ + if (_should_lock_cs) { \ + _PyCriticalSection_End(&_cs); \ + } \ + } + // Asserts that the mutex is locked. The mutex must be held by the // top-most critical section otherwise there's the possibility // that the mutex would be swalled out in some code paths. @@ -144,10 +155,10 @@ extern "C" { # define Py_BEGIN_CRITICAL_SECTION_MUT(mut) # define Py_BEGIN_CRITICAL_SECTION(op) # define Py_END_CRITICAL_SECTION() -# define Py_XBEGIN_CRITICAL_SECTION(op) -# define Py_XEND_CRITICAL_SECTION() # define Py_BEGIN_CRITICAL_SECTION2(a, b) # define Py_END_CRITICAL_SECTION2() +# define Py_BEGIN_CRITICAL_SECTION_SEQUENCE_FAST(original) +# define Py_END_CRITICAL_SECTION_SEQUENCE_FAST() # define _Py_CRITICAL_SECTION_ASSERT_MUTEX_LOCKED(mutex) # define _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(op) #endif /* !Py_GIL_DISABLED */ @@ -202,16 +213,6 @@ _PyCriticalSection_Begin(_PyCriticalSection *c, PyMutex *m) } } -static inline void -_PyCriticalSection_XBegin(_PyCriticalSection *c, PyObject *op) -{ -#ifdef Py_GIL_DISABLED - if (op != NULL) { - _PyCriticalSection_Begin(c, &_PyObject_CAST(op)->ob_mutex); - } -#endif -} - // Removes the top-most critical section from the thread's stack of critical // sections. If the new top-most critical section is inactive, then it is // resumed. @@ -234,14 +235,6 @@ _PyCriticalSection_End(_PyCriticalSection *c) _PyCriticalSection_Pop(c); } -static inline void -_PyCriticalSection_XEnd(_PyCriticalSection *c) -{ - if (c->mutex) { - _PyCriticalSection_End(c); - } -} - static inline void _PyCriticalSection2_Begin(_PyCriticalSection2 *c, PyMutex *m1, PyMutex *m2) { diff --git a/Include/internal/pycore_gc.h b/Include/internal/pycore_gc.h index 281094df786735..ba8b8e1903f307 100644 --- a/Include/internal/pycore_gc.h +++ b/Include/internal/pycore_gc.h @@ -37,7 +37,15 @@ static inline PyObject* _Py_FROM_GC(PyGC_Head *gc) { } -/* Bit flags for ob_gc_bits (in Py_GIL_DISABLED builds) */ +/* Bit flags for ob_gc_bits (in Py_GIL_DISABLED builds) + * + * Setting the bits requires a relaxed store. The per-object lock must also be + * held, except when the object is only visible to a single thread (e.g. during + * object initialization or destruction). + * + * Reading the bits requires using a relaxed load, but does not require holding + * the per-object lock. 
+ */ #ifdef Py_GIL_DISABLED # define _PyGC_BITS_TRACKED (1) // Tracked by the GC # define _PyGC_BITS_FINALIZED (2) // tp_finalize was called @@ -48,10 +56,34 @@ static inline PyObject* _Py_FROM_GC(PyGC_Head *gc) { # define _PyGC_BITS_DEFERRED (64) // Use deferred reference counting #endif +#ifdef Py_GIL_DISABLED + +static inline void +_PyObject_SET_GC_BITS(PyObject *op, uint8_t new_bits) +{ + uint8_t bits = _Py_atomic_load_uint8_relaxed(&op->ob_gc_bits); + _Py_atomic_store_uint8_relaxed(&op->ob_gc_bits, bits | new_bits); +} + +static inline int +_PyObject_HAS_GC_BITS(PyObject *op, uint8_t bits) +{ + return (_Py_atomic_load_uint8_relaxed(&op->ob_gc_bits) & bits) != 0; +} + +static inline void +_PyObject_CLEAR_GC_BITS(PyObject *op, uint8_t bits_to_clear) +{ + uint8_t bits = _Py_atomic_load_uint8_relaxed(&op->ob_gc_bits); + _Py_atomic_store_uint8_relaxed(&op->ob_gc_bits, bits & ~bits_to_clear); +} + +#endif + /* True if the object is currently tracked by the GC. */ static inline int _PyObject_GC_IS_TRACKED(PyObject *op) { #ifdef Py_GIL_DISABLED - return (op->ob_gc_bits & _PyGC_BITS_TRACKED) != 0; + return _PyObject_HAS_GC_BITS(op, _PyGC_BITS_TRACKED); #else PyGC_Head *gc = _Py_AS_GC(op); return (gc->_gc_next != 0); @@ -80,12 +112,12 @@ static inline int _PyObject_GC_MAY_BE_TRACKED(PyObject *obj) { * for calling _PyMem_FreeDelayed on the referenced * memory. */ static inline int _PyObject_GC_IS_SHARED(PyObject *op) { - return (op->ob_gc_bits & _PyGC_BITS_SHARED) != 0; + return _PyObject_HAS_GC_BITS(op, _PyGC_BITS_SHARED); } #define _PyObject_GC_IS_SHARED(op) _PyObject_GC_IS_SHARED(_Py_CAST(PyObject*, op)) static inline void _PyObject_GC_SET_SHARED(PyObject *op) { - op->ob_gc_bits |= _PyGC_BITS_SHARED; + _PyObject_SET_GC_BITS(op, _PyGC_BITS_SHARED); } #define _PyObject_GC_SET_SHARED(op) _PyObject_GC_SET_SHARED(_Py_CAST(PyObject*, op)) @@ -95,13 +127,13 @@ static inline void _PyObject_GC_SET_SHARED(PyObject *op) { * Objects with this bit that are GC objects will automatically * delay-freed by PyObject_GC_Del. 
*/ static inline int _PyObject_GC_IS_SHARED_INLINE(PyObject *op) { - return (op->ob_gc_bits & _PyGC_BITS_SHARED_INLINE) != 0; + return _PyObject_HAS_GC_BITS(op, _PyGC_BITS_SHARED_INLINE); } #define _PyObject_GC_IS_SHARED_INLINE(op) \ _PyObject_GC_IS_SHARED_INLINE(_Py_CAST(PyObject*, op)) static inline void _PyObject_GC_SET_SHARED_INLINE(PyObject *op) { - op->ob_gc_bits |= _PyGC_BITS_SHARED_INLINE; + _PyObject_SET_GC_BITS(op, _PyGC_BITS_SHARED_INLINE); } #define _PyObject_GC_SET_SHARED_INLINE(op) \ _PyObject_GC_SET_SHARED_INLINE(_Py_CAST(PyObject*, op)) @@ -178,7 +210,7 @@ static inline void _PyGCHead_SET_PREV(PyGC_Head *gc, PyGC_Head *prev) { static inline int _PyGC_FINALIZED(PyObject *op) { #ifdef Py_GIL_DISABLED - return (op->ob_gc_bits & _PyGC_BITS_FINALIZED) != 0; + return _PyObject_HAS_GC_BITS(op, _PyGC_BITS_FINALIZED); #else PyGC_Head *gc = _Py_AS_GC(op); return ((gc->_gc_prev & _PyGC_PREV_MASK_FINALIZED) != 0); @@ -186,7 +218,7 @@ static inline int _PyGC_FINALIZED(PyObject *op) { } static inline void _PyGC_SET_FINALIZED(PyObject *op) { #ifdef Py_GIL_DISABLED - op->ob_gc_bits |= _PyGC_BITS_FINALIZED; + _PyObject_SET_GC_BITS(op, _PyGC_BITS_FINALIZED); #else PyGC_Head *gc = _Py_AS_GC(op); gc->_gc_prev |= _PyGC_PREV_MASK_FINALIZED; @@ -194,7 +226,7 @@ static inline void _PyGC_SET_FINALIZED(PyObject *op) { } static inline void _PyGC_CLEAR_FINALIZED(PyObject *op) { #ifdef Py_GIL_DISABLED - op->ob_gc_bits &= ~_PyGC_BITS_FINALIZED; + _PyObject_CLEAR_GC_BITS(op, _PyGC_BITS_FINALIZED); #else PyGC_Head *gc = _Py_AS_GC(op); gc->_gc_prev &= ~_PyGC_PREV_MASK_FINALIZED; @@ -315,15 +347,11 @@ struct _gc_runtime_state { /* gh-117783: Deferred reference counting is not fully implemented yet, so as a temporary measure we treat objects using deferred referenence - counting as immortal. */ - struct { - /* Immortalize objects instead of marking them as using deferred - reference counting. */ - int enabled; - - /* Set enabled=1 when the first background thread is created. */ - int enable_on_thread_created; - } immortalize; + counting as immortal. 
The value may be zero, one, or a negative number: + 0: immortalize deferred RC objects once the first thread is created + 1: immortalize all deferred RC objects immediately + <0: suppressed; don't immortalize objects */ + int immortalize; #endif }; diff --git a/Include/internal/pycore_global_objects_fini_generated.h b/Include/internal/pycore_global_objects_fini_generated.h index ca7355b2b61aa7..a40f007bdacc43 100644 --- a/Include/internal/pycore_global_objects_fini_generated.h +++ b/Include/internal/pycore_global_objects_fini_generated.h @@ -828,6 +828,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) { _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(c_call)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(c_exception)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(c_return)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(cached_datetime_module)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(cached_statements)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(cadata)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(cafile)); @@ -1221,6 +1222,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) { _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(sort)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(source)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(source_traceback)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(spam)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(src)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(src_dir_fd)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(stacklevel)); diff --git a/Include/internal/pycore_global_strings.h b/Include/internal/pycore_global_strings.h index fbb25285f0f282..eaf81e4f115159 100644 --- a/Include/internal/pycore_global_strings.h +++ b/Include/internal/pycore_global_strings.h @@ -317,6 +317,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(c_call) STRUCT_FOR_ID(c_exception) STRUCT_FOR_ID(c_return) + STRUCT_FOR_ID(cached_datetime_module) STRUCT_FOR_ID(cached_statements) STRUCT_FOR_ID(cadata) STRUCT_FOR_ID(cafile) @@ -710,6 +711,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(sort) STRUCT_FOR_ID(source) STRUCT_FOR_ID(source_traceback) + STRUCT_FOR_ID(spam) STRUCT_FOR_ID(src) STRUCT_FOR_ID(src_dir_fd) STRUCT_FOR_ID(stacklevel) diff --git a/Include/internal/pycore_import.h b/Include/internal/pycore_import.h index bd40707fed21a8..3806e0d3cd0a95 100644 --- a/Include/internal/pycore_import.h +++ b/Include/internal/pycore_import.h @@ -20,7 +20,7 @@ PyAPI_FUNC(int) _PyImport_SetModule(PyObject *name, PyObject *module); extern int _PyImport_SetModuleString(const char *name, PyObject* module); extern void _PyImport_AcquireLock(PyInterpreterState *interp); -extern int _PyImport_ReleaseLock(PyInterpreterState *interp); +extern void _PyImport_ReleaseLock(PyInterpreterState *interp); // This is used exclusively for the sys and builtins modules: extern int _PyImport_FixupBuiltin( @@ -94,11 +94,7 @@ struct _import_state { #endif PyObject *import_func; /* The global import lock. 
*/ - struct { - PyThread_type_lock mutex; - unsigned long thread; - int level; - } lock; + _PyRecursiveMutex lock; /* diagnostic info in PyImport_ImportModuleLevelObject() */ struct { int import_level; @@ -123,11 +119,6 @@ struct _import_state { #define IMPORTS_INIT \ { \ DLOPENFLAGS_INIT \ - .lock = { \ - .mutex = NULL, \ - .thread = PYTHREAD_INVALID_THREAD_ID, \ - .level = 0, \ - }, \ .find_and_load = { \ .header = 1, \ }, \ @@ -180,11 +171,6 @@ extern void _PyImport_FiniCore(PyInterpreterState *interp); extern void _PyImport_FiniExternal(PyInterpreterState *interp); -#ifdef HAVE_FORK -extern PyStatus _PyImport_ReInitLock(PyInterpreterState *interp); -#endif - - extern PyObject* _PyImport_GetBuiltinModuleNames(void); struct _module_alias { diff --git a/Include/internal/pycore_lock.h b/Include/internal/pycore_lock.h index a5b28e4bd4744e..d5853b2c9ff464 100644 --- a/Include/internal/pycore_lock.h +++ b/Include/internal/pycore_lock.h @@ -219,6 +219,18 @@ _PyOnceFlag_CallOnce(_PyOnceFlag *flag, _Py_once_fn_t *fn, void *arg) return _PyOnceFlag_CallOnceSlow(flag, fn, arg); } +// A recursive mutex. The mutex should zero-initialized. +typedef struct { + PyMutex mutex; + unsigned long long thread; // i.e., PyThread_get_thread_ident_ex() + size_t level; +} _PyRecursiveMutex; + +PyAPI_FUNC(int) _PyRecursiveMutex_IsLockedByCurrentThread(_PyRecursiveMutex *m); +PyAPI_FUNC(void) _PyRecursiveMutex_Lock(_PyRecursiveMutex *m); +PyAPI_FUNC(void) _PyRecursiveMutex_Unlock(_PyRecursiveMutex *m); + + // A readers-writer (RW) lock. The lock supports multiple concurrent readers or // a single writer. The lock is write-preferring: if a writer is waiting while // the lock is read-locked then, new readers will be blocked. This avoids diff --git a/Include/internal/pycore_long.h b/Include/internal/pycore_long.h index f04f66d053bab9..8513695c22e703 100644 --- a/Include/internal/pycore_long.h +++ b/Include/internal/pycore_long.h @@ -47,17 +47,6 @@ extern "C" { # error "_PY_LONG_DEFAULT_MAX_STR_DIGITS smaller than threshold." #endif -// _PyLong_NumBits. Return the number of bits needed to represent the -// absolute value of a long. For example, this returns 1 for 1 and -1, 2 -// for 2 and -2, and 2 for 3 and -3. It returns 0 for 0. -// v must not be NULL, and must be a normalized long. -// (size_t)-1 is returned and OverflowError set if the true result doesn't -// fit in a size_t. -// -// Export for 'math' shared extension. 
-PyAPI_FUNC(size_t) _PyLong_NumBits(PyObject *v); - - /* runtime lifecycle */ extern PyStatus _PyLong_InitTypes(PyInterpreterState *); diff --git a/Include/internal/pycore_mimalloc.h b/Include/internal/pycore_mimalloc.h index 10d451398f1410..d10b01d5b49b19 100644 --- a/Include/internal/pycore_mimalloc.h +++ b/Include/internal/pycore_mimalloc.h @@ -36,9 +36,9 @@ typedef enum { # define MI_TSAN 1 #endif -#include "mimalloc.h" -#include "mimalloc/types.h" -#include "mimalloc/internal.h" +#include "mimalloc/mimalloc.h" +#include "mimalloc/mimalloc/types.h" +#include "mimalloc/mimalloc/internal.h" #endif #ifdef Py_GIL_DISABLED @@ -52,6 +52,7 @@ struct _mimalloc_thread_state { mi_heap_t *current_object_heap; mi_heap_t heaps[_Py_MIMALLOC_HEAP_COUNT]; mi_tld_t tld; + int initialized; struct llist_node page_list; }; #endif diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h index f15c332a7b0811..6f133014ce06e2 100644 --- a/Include/internal/pycore_object.h +++ b/Include/internal/pycore_object.h @@ -168,7 +168,7 @@ static inline int _PyObject_HasDeferredRefcount(PyObject *op) { #ifdef Py_GIL_DISABLED - return (op->ob_gc_bits & _PyGC_BITS_DEFERRED) != 0; + return _PyObject_HAS_GC_BITS(op, _PyGC_BITS_DEFERRED); #else return 0; #endif @@ -320,7 +320,7 @@ static inline void _PyObject_GC_TRACK( "object already tracked by the garbage collector", filename, lineno, __func__); #ifdef Py_GIL_DISABLED - op->ob_gc_bits |= _PyGC_BITS_TRACKED; + _PyObject_SET_GC_BITS(op, _PyGC_BITS_TRACKED); #else PyGC_Head *gc = _Py_AS_GC(op); _PyObject_ASSERT_FROM(op, @@ -361,7 +361,7 @@ static inline void _PyObject_GC_UNTRACK( filename, lineno, __func__); #ifdef Py_GIL_DISABLED - op->ob_gc_bits &= ~_PyGC_BITS_TRACKED; + _PyObject_CLEAR_GC_BITS(op, _PyGC_BITS_TRACKED); #else PyGC_Head *gc = _Py_AS_GC(op); PyGC_Head *prev = _PyGCHead_PREV(gc); @@ -497,6 +497,9 @@ _Py_NewRefWithLock(PyObject *op) if (_Py_TryIncrefFast(op)) { return op; } +#ifdef Py_REF_DEBUG + _Py_IncRefTotal(_PyThreadState_GET()); +#endif _Py_INCREF_STAT_INC(); for (;;) { Py_ssize_t shared = _Py_atomic_load_ssize_relaxed(&op->ob_ref_shared); @@ -586,7 +589,7 @@ _PyObject_GET_WEAKREFS_LISTPTR(PyObject *op) if (PyType_Check(op) && ((PyTypeObject *)op)->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { PyInterpreterState *interp = _PyInterpreterState_GET(); - static_builtin_state *state = _PyStaticType_GetState( + managed_static_type_state *state = _PyStaticType_GetState( interp, (PyTypeObject *)op); return _PyStaticType_GET_WEAKREFS_LISTPTR(state); } diff --git a/Include/internal/pycore_opcode_metadata.h b/Include/internal/pycore_opcode_metadata.h index 2a237bc6dd8ee5..5b0d8dfd1b0b97 100644 --- a/Include/internal/pycore_opcode_metadata.h +++ b/Include/internal/pycore_opcode_metadata.h @@ -1060,8 +1060,8 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[268] = { [INSTRUMENTED_CALL] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [INSTRUMENTED_CALL_FUNCTION_EX] = { true, INSTR_FMT_IX, 0 }, [INSTRUMENTED_CALL_KW] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [INSTRUMENTED_END_FOR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, - [INSTRUMENTED_END_SEND] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, + [INSTRUMENTED_END_FOR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG }, + [INSTRUMENTED_END_SEND] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG }, [INSTRUMENTED_FOR_ITER] = 
{ true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [INSTRUMENTED_INSTRUCTION] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [INSTRUMENTED_JUMP_BACKWARD] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG }, @@ -1299,7 +1299,6 @@ _PyOpcode_macro_expansion[256] = { [LOAD_FAST_CHECK] = { .nuops = 1, .uops = { { _LOAD_FAST_CHECK, 0, 0 } } }, [LOAD_FAST_LOAD_FAST] = { .nuops = 2, .uops = { { _LOAD_FAST, 5, 0 }, { _LOAD_FAST, 6, 0 } } }, [LOAD_FROM_DICT_OR_DEREF] = { .nuops = 1, .uops = { { _LOAD_FROM_DICT_OR_DEREF, 0, 0 } } }, - [LOAD_FROM_DICT_OR_GLOBALS] = { .nuops = 1, .uops = { { _LOAD_FROM_DICT_OR_GLOBALS, 0, 0 } } }, [LOAD_GLOBAL] = { .nuops = 1, .uops = { { _LOAD_GLOBAL, 0, 0 } } }, [LOAD_GLOBAL_BUILTIN] = { .nuops = 3, .uops = { { _GUARD_GLOBALS_VERSION, 1, 1 }, { _GUARD_BUILTINS_VERSION, 1, 2 }, { _LOAD_GLOBAL_BUILTINS, 1, 3 } } }, [LOAD_GLOBAL_MODULE] = { .nuops = 2, .uops = { { _GUARD_GLOBALS_VERSION, 1, 1 }, { _LOAD_GLOBAL_MODULE, 1, 3 } } }, diff --git a/Include/internal/pycore_qsbr.h b/Include/internal/pycore_qsbr.h index c3680a205542f7..20e643e172b38d 100644 --- a/Include/internal/pycore_qsbr.h +++ b/Include/internal/pycore_qsbr.h @@ -140,7 +140,7 @@ _Py_qsbr_register(struct _PyThreadStateImpl *tstate, // Disassociates a PyThreadState from the QSBR state and frees the QSBR state. extern void -_Py_qsbr_unregister(struct _PyThreadStateImpl *tstate); +_Py_qsbr_unregister(PyThreadState *tstate); extern void _Py_qsbr_fini(PyInterpreterState *interp); diff --git a/Include/internal/pycore_runtime_init_generated.h b/Include/internal/pycore_runtime_init_generated.h index 508da40c53422d..b18c2684691ce2 100644 --- a/Include/internal/pycore_runtime_init_generated.h +++ b/Include/internal/pycore_runtime_init_generated.h @@ -826,6 +826,7 @@ extern "C" { INIT_ID(c_call), \ INIT_ID(c_exception), \ INIT_ID(c_return), \ + INIT_ID(cached_datetime_module), \ INIT_ID(cached_statements), \ INIT_ID(cadata), \ INIT_ID(cafile), \ @@ -1219,6 +1220,7 @@ extern "C" { INIT_ID(sort), \ INIT_ID(source), \ INIT_ID(source_traceback), \ + INIT_ID(spam), \ INIT_ID(src), \ INIT_ID(src_dir_fd), \ INIT_ID(stacklevel), \ diff --git a/Include/internal/pycore_symtable.h b/Include/internal/pycore_symtable.h index 16e89f80d9d0c8..ac6c499c08264e 100644 --- a/Include/internal/pycore_symtable.h +++ b/Include/internal/pycore_symtable.h @@ -81,6 +81,7 @@ typedef struct _symtable_entry { PyObject *ste_varnames; /* list of function parameters */ PyObject *ste_children; /* list of child blocks */ PyObject *ste_directives;/* locations of global and nonlocal statements */ + PyObject *ste_mangled_names; /* set of names for which mangling should be applied */ _Py_block_ty ste_type; int ste_nested; /* true if block is nested */ unsigned ste_free : 1; /* true if block has free variables */ @@ -128,6 +129,7 @@ extern PySTEntryObject* _PySymtable_Lookup(struct symtable *, void *); extern void _PySymtable_Free(struct symtable *); +extern PyObject *_Py_MaybeMangle(PyObject *privateobj, PySTEntryObject *ste, PyObject *name); extern PyObject* _Py_Mangle(PyObject *p, PyObject *name); /* Flags for def-use information */ diff --git a/Include/internal/pycore_typeobject.h b/Include/internal/pycore_typeobject.h index 7e533bd138469b..8664ae0e44533f 100644 --- a/Include/internal/pycore_typeobject.h +++ b/Include/internal/pycore_typeobject.h @@ -44,10 +44,12 @@ struct type_cache { /* For now we hard-code this to a value for which we are confident all the static builtin types will 
fit (for all builds). */ -#define _Py_MAX_STATIC_BUILTIN_TYPES 200 +#define _Py_MAX_MANAGED_STATIC_BUILTIN_TYPES 200 +#define _Py_MAX_MANAGED_STATIC_EXT_TYPES 10 typedef struct { PyTypeObject *type; + int isbuiltin; int readying; int ready; // XXX tp_dict can probably be statically allocated, @@ -59,7 +61,7 @@ typedef struct { are also some diagnostic uses for the list of weakrefs, so we still keep it. */ PyObject *tp_weaklist; -} static_builtin_state; +} managed_static_type_state; struct types_state { /* Used to set PyTypeObject.tp_version_tag. @@ -105,8 +107,16 @@ struct types_state { num_builtins_initialized is incremented once for each static builtin type. Once initialization is over for a subinterpreter, the value will be the same as for all other interpreters. */ - size_t num_builtins_initialized; - static_builtin_state builtins[_Py_MAX_STATIC_BUILTIN_TYPES]; + struct { + size_t num_initialized; + managed_static_type_state initialized[_Py_MAX_MANAGED_STATIC_BUILTIN_TYPES]; + } builtins; + /* We apply a similar strategy for managed extension modules. */ + struct { + size_t num_initialized; + size_t next_index; + managed_static_type_state initialized[_Py_MAX_MANAGED_STATIC_EXT_TYPES]; + } for_extensions; PyMutex mutex; }; @@ -130,12 +140,35 @@ typedef struct wrapperbase pytype_slotdef; static inline PyObject ** -_PyStaticType_GET_WEAKREFS_LISTPTR(static_builtin_state *state) +_PyStaticType_GET_WEAKREFS_LISTPTR(managed_static_type_state *state) { assert(state != NULL); return &state->tp_weaklist; } +extern int _PyStaticType_InitBuiltin( + PyInterpreterState *interp, + PyTypeObject *type); +extern void _PyStaticType_FiniBuiltin( + PyInterpreterState *interp, + PyTypeObject *type); +extern void _PyStaticType_ClearWeakRefs( + PyInterpreterState *interp, + PyTypeObject *type); +extern managed_static_type_state * _PyStaticType_GetState( + PyInterpreterState *interp, + PyTypeObject *type); + +// Export for '_datetime' shared extension. 
+PyAPI_FUNC(int) _PyStaticType_InitForExtension( + PyInterpreterState *interp, + PyTypeObject *self); +PyAPI_FUNC(void) _PyStaticType_FiniForExtension( + PyInterpreterState *interp, + PyTypeObject *self, + int final); + + /* Like PyType_GetModuleState, but skips verification * that type is a heap type with an associated module */ static inline void * @@ -151,11 +184,6 @@ _PyType_GetModuleState(PyTypeObject *type) } -extern int _PyStaticType_InitBuiltin(PyInterpreterState *, PyTypeObject *type); -extern static_builtin_state * _PyStaticType_GetState(PyInterpreterState *, PyTypeObject *); -extern void _PyStaticType_ClearWeakRefs(PyInterpreterState *, PyTypeObject *type); -extern void _PyStaticType_Dealloc(PyInterpreterState *, PyTypeObject *); - // Export for 'math' shared extension, used via _PyType_IsReady() static inline // function PyAPI_FUNC(PyObject *) _PyType_GetDict(PyTypeObject *); diff --git a/Include/internal/pycore_typevarobject.h b/Include/internal/pycore_typevarobject.h index 80a2daf4efc16a..a368edebd622a1 100644 --- a/Include/internal/pycore_typevarobject.h +++ b/Include/internal/pycore_typevarobject.h @@ -18,6 +18,7 @@ extern int _Py_initialize_generic(PyInterpreterState *); extern void _Py_clear_generic_types(PyInterpreterState *); extern PyTypeObject _PyTypeAlias_Type; +extern PyTypeObject _PyNoDefault_Type; extern PyObject _Py_NoDefaultStruct; #ifdef __cplusplus diff --git a/Include/internal/pycore_unicodeobject_generated.h b/Include/internal/pycore_unicodeobject_generated.h index cc2fc15ac5cabf..87e22ac39c1fef 100644 --- a/Include/internal/pycore_unicodeobject_generated.h +++ b/Include/internal/pycore_unicodeobject_generated.h @@ -792,6 +792,9 @@ _PyUnicode_InitStaticStrings(PyInterpreterState *interp) { string = &_Py_ID(c_return); assert(_PyUnicode_CheckConsistency(string, 1)); _PyUnicode_InternInPlace(interp, &string); + string = &_Py_ID(cached_datetime_module); + assert(_PyUnicode_CheckConsistency(string, 1)); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(cached_statements); assert(_PyUnicode_CheckConsistency(string, 1)); _PyUnicode_InternInPlace(interp, &string); @@ -1971,6 +1974,9 @@ _PyUnicode_InitStaticStrings(PyInterpreterState *interp) { string = &_Py_ID(source_traceback); assert(_PyUnicode_CheckConsistency(string, 1)); _PyUnicode_InternInPlace(interp, &string); + string = &_Py_ID(spam); + assert(_PyUnicode_CheckConsistency(string, 1)); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(src); assert(_PyUnicode_CheckConsistency(string, 1)); _PyUnicode_InternInPlace(interp, &string); diff --git a/Include/internal/pycore_uop_metadata.h b/Include/internal/pycore_uop_metadata.h index 470e95e2b3b041..bc0ba16e24fe98 100644 --- a/Include/internal/pycore_uop_metadata.h +++ b/Include/internal/pycore_uop_metadata.h @@ -107,7 +107,6 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_STORE_GLOBAL] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_DELETE_GLOBAL] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG, [_LOAD_LOCALS] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_LOAD_FROM_DICT_OR_GLOBALS] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG, [_LOAD_GLOBAL] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_GUARD_GLOBALS_VERSION] = HAS_DEOPT_FLAG, [_GUARD_BUILTINS_VERSION] = HAS_DEOPT_FLAG, @@ -438,7 +437,6 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = { [_LOAD_FAST_CHECK] = "_LOAD_FAST_CHECK", [_LOAD_FAST_LOAD_FAST] = 
"_LOAD_FAST_LOAD_FAST", [_LOAD_FROM_DICT_OR_DEREF] = "_LOAD_FROM_DICT_OR_DEREF", - [_LOAD_FROM_DICT_OR_GLOBALS] = "_LOAD_FROM_DICT_OR_GLOBALS", [_LOAD_GLOBAL] = "_LOAD_GLOBAL", [_LOAD_GLOBAL_BUILTINS] = "_LOAD_GLOBAL_BUILTINS", [_LOAD_GLOBAL_MODULE] = "_LOAD_GLOBAL_MODULE", @@ -690,8 +688,6 @@ int _PyUop_num_popped(int opcode, int oparg) return 0; case _LOAD_LOCALS: return 0; - case _LOAD_FROM_DICT_OR_GLOBALS: - return 1; case _LOAD_GLOBAL: return 0; case _GUARD_GLOBALS_VERSION: diff --git a/Include/internal/pycore_weakref.h b/Include/internal/pycore_weakref.h index e057a27340f718..cc6c7ff9a9b438 100644 --- a/Include/internal/pycore_weakref.h +++ b/Include/internal/pycore_weakref.h @@ -111,8 +111,6 @@ extern Py_ssize_t _PyWeakref_GetWeakrefCount(PyObject *obj); // intact. extern void _PyWeakref_ClearWeakRefsExceptCallbacks(PyObject *obj); -extern void _PyWeakref_ClearRef(PyWeakReference *self); - PyAPI_FUNC(int) _PyWeakref_IsDead(PyObject *weakref); #ifdef __cplusplus diff --git a/Include/patchlevel.h b/Include/patchlevel.h index 179c9f50214935..87b4f551bc5fbd 100644 --- a/Include/patchlevel.h +++ b/Include/patchlevel.h @@ -20,10 +20,10 @@ #define PY_MINOR_VERSION 13 #define PY_MICRO_VERSION 0 #define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_BETA -#define PY_RELEASE_SERIAL 1 +#define PY_RELEASE_SERIAL 2 /* Version as a string */ -#define PY_VERSION "3.13.0b1" +#define PY_VERSION "3.13.0b2+" /*--end constants--*/ /* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2. diff --git a/Lib/_ios_support.py b/Lib/_ios_support.py index db3fe23e45bca0..20467a7c2bcaeb 100644 --- a/Lib/_ios_support.py +++ b/Lib/_ios_support.py @@ -5,7 +5,7 @@ # ctypes is an optional module. If it's not present, we're limited in what # we can tell about the system, but we don't want to prevent the module # from working. - print("ctypes isn't available; iOS system calls will not be available") + print("ctypes isn't available; iOS system calls will not be available", file=sys.stderr) objc = None else: # ctypes is available. 
Load the ObjC library, and wrap the objc_getClass, @@ -13,7 +13,7 @@ lib = util.find_library("objc") if lib is None: # Failed to load the objc library - raise RuntimeError("ObjC runtime library couldn't be loaded") + raise ImportError("ObjC runtime library couldn't be loaded") objc = cdll.LoadLibrary(lib) objc.objc_getClass.restype = c_void_p diff --git a/Lib/_pyrepl/__main__.py b/Lib/_pyrepl/__main__.py index c598019e7cd4ad..dae4ba6e178b9a 100644 --- a/Lib/_pyrepl/__main__.py +++ b/Lib/_pyrepl/__main__.py @@ -1,7 +1,11 @@ import os import sys -CAN_USE_PYREPL = sys.platform != "win32" +CAN_USE_PYREPL: bool +if sys.platform != "win32": + CAN_USE_PYREPL = True +else: + CAN_USE_PYREPL = sys.getwindowsversion().build >= 10586 # Windows 10 TH2 def interactive_console(mainmodule=None, quiet=False, pythonstartup=False): diff --git a/Lib/_pyrepl/_minimal_curses.py b/Lib/_pyrepl/_minimal_curses.py index 0757fb2c664add..849617bf7585e4 100644 --- a/Lib/_pyrepl/_minimal_curses.py +++ b/Lib/_pyrepl/_minimal_curses.py @@ -17,7 +17,7 @@ class error(Exception): pass -def _find_clib(): +def _find_clib() -> str: trylibs = ["ncursesw", "ncurses", "curses"] for lib in trylibs: diff --git a/Lib/_pyrepl/commands.py b/Lib/_pyrepl/commands.py index 456cba0769c952..c3fce91013b001 100644 --- a/Lib/_pyrepl/commands.py +++ b/Lib/_pyrepl/commands.py @@ -34,9 +34,7 @@ # types if False: - from .reader import Reader from .historical_reader import HistoricalReader - from .console import Event class Command: @@ -218,9 +216,17 @@ def do(self) -> None: import signal self.reader.console.finish() + self.reader.finish() os.kill(os.getpid(), signal.SIGINT) +class ctrl_c(Command): + def do(self) -> None: + self.reader.console.finish() + self.reader.finish() + raise KeyboardInterrupt + + class suspend(Command): def do(self) -> None: import signal @@ -245,7 +251,7 @@ def do(self) -> None: x, y = r.pos2xy() new_y = y - 1 - if new_y < 0: + if r.bol() == 0: if r.historyi > 0: r.select_item(r.historyi - 1) return @@ -360,7 +366,8 @@ def do(self) -> None: class self_insert(EditCommand): def do(self) -> None: r = self.reader - r.insert(self.event * r.get_arg()) + text = self.event * r.get_arg() + r.insert(text) class insert_nl(EditCommand): @@ -460,8 +467,6 @@ def do(self) -> None: class paste_mode(Command): def do(self) -> None: - if not self.reader.paste_mode: - self.reader.was_paste_mode_activated = True self.reader.paste_mode = not self.reader.paste_mode self.reader.dirty = True @@ -469,9 +474,10 @@ def do(self) -> None: class enable_bracketed_paste(Command): def do(self) -> None: self.reader.paste_mode = True - self.reader.was_paste_mode_activated = True + self.reader.in_bracketed_paste = True class disable_bracketed_paste(Command): def do(self) -> None: self.reader.paste_mode = False + self.reader.in_bracketed_paste = False self.reader.dirty = True diff --git a/Lib/_pyrepl/completing_reader.py b/Lib/_pyrepl/completing_reader.py index 19fc06feaf3ced..05770aaf5060cc 100644 --- a/Lib/_pyrepl/completing_reader.py +++ b/Lib/_pyrepl/completing_reader.py @@ -30,7 +30,7 @@ # types Command = commands.Command if False: - from .types import Callback, SimpleContextManager, KeySpec, CommandName + from .types import KeySpec, CommandName def prefix(wordlist: list[str], j: int = 0) -> str: @@ -187,18 +187,20 @@ def do(self) -> None: if p: r.insert(p) if last_is_completer: - if not r.cmpltn_menu_vis: - r.cmpltn_menu_vis = 1 + r.cmpltn_menu_visible = True + r.cmpltn_message_visible = False r.cmpltn_menu, r.cmpltn_menu_end = build_menu( r.console, 
completions, r.cmpltn_menu_end, r.use_brackets, r.sort_in_column) r.dirty = True - elif stem + p in completions: - r.msg = "[ complete but not unique ]" - r.dirty = True - else: - r.msg = "[ not unique ]" - r.dirty = True + elif not r.cmpltn_menu_visible: + r.cmpltn_message_visible = True + if stem + p in completions: + r.msg = "[ complete but not unique ]" + r.dirty = True + else: + r.msg = "[ not unique ]" + r.dirty = True class self_insert(commands.self_insert): @@ -207,8 +209,7 @@ def do(self) -> None: r = self.reader # type: ignore[assignment] commands.self_insert.do(self) - - if r.cmpltn_menu_vis: + if r.cmpltn_menu_visible: stem = r.get_stem() if len(stem) < 1: r.cmpltn_reset() @@ -235,7 +236,8 @@ class CompletingReader(Reader): ### Instance variables cmpltn_menu: list[str] = field(init=False) - cmpltn_menu_vis: int = field(init=False) + cmpltn_menu_visible: bool = field(init=False) + cmpltn_message_visible: bool = field(init=False) cmpltn_menu_end: int = field(init=False) cmpltn_menu_choices: list[str] = field(init=False) @@ -257,7 +259,7 @@ def after_command(self, cmd: Command) -> None: def calc_screen(self) -> list[str]: screen = super().calc_screen() - if self.cmpltn_menu_vis: + if self.cmpltn_menu_visible: ly = self.lxy[1] screen[ly:ly] = self.cmpltn_menu self.screeninfo[ly:ly] = [(0, [])]*len(self.cmpltn_menu) @@ -270,7 +272,8 @@ def finish(self) -> None: def cmpltn_reset(self) -> None: self.cmpltn_menu = [] - self.cmpltn_menu_vis = 0 + self.cmpltn_menu_visible = False + self.cmpltn_message_visible = False self.cmpltn_menu_end = 0 self.cmpltn_menu_choices = [] diff --git a/Lib/_pyrepl/console.py b/Lib/_pyrepl/console.py index d7e86e768671dc..a8d3f520340dcf 100644 --- a/Lib/_pyrepl/console.py +++ b/Lib/_pyrepl/console.py @@ -19,8 +19,21 @@ from __future__ import annotations +import _colorize # type: ignore[import-not-found] + from abc import ABC, abstractmethod +import ast +import code from dataclasses import dataclass, field +import os.path +import sys + + +TYPE_CHECKING = False + +if TYPE_CHECKING: + from typing import IO + from typing import Callable @dataclass @@ -36,6 +49,25 @@ class Console(ABC): height: int = 25 width: int = 80 + def __init__( + self, + f_in: IO[bytes] | int = 0, + f_out: IO[bytes] | int = 1, + term: str = "", + encoding: str = "", + ): + self.encoding = encoding or sys.getdefaultencoding() + + if isinstance(f_in, int): + self.input_fd = f_in + else: + self.input_fd = f_in.fileno() + + if isinstance(f_out, int): + self.output_fd = f_out + else: + self.output_fd = f_out.fileno() + @abstractmethod def refresh(self, screen: list[str], xy: tuple[int, int]) -> None: ... @@ -103,10 +135,67 @@ def getpending(self) -> Event: ... @abstractmethod - def wait(self) -> None: - """Wait for an event.""" + def wait(self, timeout: float | None) -> bool: + """Wait for an event. The return value is True if an event is + available, False if the timeout has been reached. If timeout is + None, wait forever. The timeout is in milliseconds.""" ... - @abstractmethod - def repaint(self) -> None: + @property + def input_hook(self) -> Callable[[], int] | None: + """Returns the current input hook.""" ... + + @abstractmethod + def repaint(self) -> None: ... 
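The new Console.wait(timeout)/input_hook contract above is easiest to see from
the caller's side. A rough sketch, assuming `console` is some concrete Console
implementation; `next_event` is a hypothetical helper, not part of this diff:

    def next_event(console):
        # Sketch only: poll in 100 ms slices so the input hook keeps
        # running while we wait for terminal input.
        hook = console.input_hook
        if hook is None:
            return console.get_event(block=True)
        hook()
        # wait() takes milliseconds and returns True once an event is ready.
        while not console.wait(100):
            hook()
        return console.get_event(block=False)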
+ + +class InteractiveColoredConsole(code.InteractiveConsole): + def __init__( + self, + locals: dict[str, object] | None = None, + filename: str = "", + *, + local_exit: bool = False, + ) -> None: + super().__init__(locals=locals, filename=filename, local_exit=local_exit) # type: ignore[call-arg] + self.can_colorize = _colorize.can_colorize() + + def showsyntaxerror(self, filename=None): + super().showsyntaxerror(colorize=self.can_colorize) + + def showtraceback(self): + super().showtraceback(colorize=self.can_colorize) + + def runsource(self, source, filename="", symbol="single"): + try: + tree = ast.parse(source) + except (SyntaxError, OverflowError, ValueError): + self.showsyntaxerror(filename) + return False + if tree.body: + *_, last_stmt = tree.body + for stmt in tree.body: + wrapper = ast.Interactive if stmt is last_stmt else ast.Module + the_symbol = symbol if stmt is last_stmt else "exec" + item = wrapper([stmt]) + try: + code = self.compile.compiler(item, filename, the_symbol, dont_inherit=True) + except SyntaxError as e: + if e.args[0] == "'await' outside function": + python = os.path.basename(sys.executable) + e.add_note( + f"Try the asyncio REPL ({python} -m asyncio) to use" + f" top-level 'await' and run background asyncio tasks." + ) + self.showsyntaxerror(filename) + return False + except (OverflowError, ValueError): + self.showsyntaxerror(filename) + return False + + if code is None: + return True + + self.runcode(code) + return False diff --git a/Lib/_pyrepl/historical_reader.py b/Lib/_pyrepl/historical_reader.py index eef7d901b083ef..121de33da5052f 100644 --- a/Lib/_pyrepl/historical_reader.py +++ b/Lib/_pyrepl/historical_reader.py @@ -259,7 +259,7 @@ def select_item(self, i: int) -> None: self.transient_history[self.historyi] = self.get_unicode() buf = self.transient_history.get(i) if buf is None: - buf = self.history[i] + buf = self.history[i].rstrip() self.buffer = list(buf) self.historyi = i self.pos = len(self.buffer) diff --git a/Lib/_pyrepl/input.py b/Lib/_pyrepl/input.py index 300e16d1d25441..21c24eb5cde3e3 100644 --- a/Lib/_pyrepl/input.py +++ b/Lib/_pyrepl/input.py @@ -60,7 +60,7 @@ def empty(self) -> bool: class KeymapTranslator(InputTranslator): - def __init__(self, keymap, verbose=0, invalid_cls=None, character_cls=None): + def __init__(self, keymap, verbose=False, invalid_cls=None, character_cls=None): self.verbose = verbose from .keymap import compile_keymap, parse_keys @@ -110,5 +110,5 @@ def get(self): else: return None - def empty(self): + def empty(self) -> bool: return not self.results diff --git a/Lib/_pyrepl/keymap.py b/Lib/_pyrepl/keymap.py index 31a02642ce8ceb..2fb03d1952382f 100644 --- a/Lib/_pyrepl/keymap.py +++ b/Lib/_pyrepl/keymap.py @@ -19,38 +19,32 @@ # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. """ -functions for parsing keyspecs +Keymap contains functions for parsing keyspecs and turning keyspecs into +appropriate sequences. -Support for turning keyspecs into appropriate sequences. +A keyspec is a string representing a sequence of key presses that can +be bound to a command. All characters other than the backslash represent +themselves. In the traditional manner, a backslash introduces an escape +sequence. -pyrepl uses it's own bastardized keyspec format, which is meant to be -a strict superset of readline's \"KEYSEQ\" format (which is to say -that if you can come up with a spec readline accepts that this -doesn't, you've found a bug and should tell me about it). 
- -Note that this is the `\\C-o' style of readline keyspec, not the -`Control-o' sort. - -A keyspec is a string representing a sequence of keypresses that can -be bound to a command. - -All characters other than the backslash represent themselves. In the -traditional manner, a backslash introduces a escape sequence. +pyrepl uses its own keyspec format that is meant to be a strict superset of +readline's KEYSEQ format. This means that if a spec is found that readline +accepts that this doesn't, it should be logged as a bug. Note that this means +we're using the `\\C-o' style of readline's keyspec, not the `Control-o' sort. The extension to readline is that the sequence \\<KEY> denotes the -sequence of charaters produced by hitting KEY. +sequence of characters produced by hitting KEY. Examples: - -`a' - what you get when you hit the `a' key +`a' - what you get when you hit the `a' key `\\EOA' - Escape - O - A (up, on my terminal) `\\<UP>' - the up arrow key -`\\<up>' - ditto (keynames are case insensitive) +`\\<up>' - ditto (keynames are case-insensitive) `\\C-o', `\\c-o' - control-o `\\M-.' - meta-period `\\E.' - ditto (that's how meta works for pyrepl) `\\<tab>', `\\<TAB>', `\\t', `\\011', '\\x09', '\\X09', '\\C-i', '\\C-I' - - all of these are the tab character. Can you think of any more? + - all of these are the tab character. """ _escapes = { @@ -111,7 +105,17 @@ class KeySpecError(Exception): pass -def _parse_key1(key, s): +def parse_keys(keys: str) -> list[str]: + """Parse keys in keyspec format to a sequence of keys.""" + s = 0 + r: list[str] = [] + while s < len(keys): + k, s = _parse_single_key_sequence(keys, s) + r.extend(k) + return r + + +def _parse_single_key_sequence(key: str, s: int) -> tuple[list[str], int]: ctrl = 0 meta = 0 ret = "" @@ -177,23 +181,17 @@ ret = key[s] s += 1 if ctrl: - if len(ret) > 1: - raise KeySpecError("\\C- must be followed by a character") - ret = chr(ord(ret) & 0x1F) # curses.ascii.ctrl() - if meta: - ret = ["\033", ret] - else: - ret = [ret] - return ret, s - + if len(ret) == 1: + ret = chr(ord(ret) & 0x1F) # curses.ascii.ctrl() + elif ret in {"left", "right"}: + ret = f"ctrl {ret}" + else: + raise KeySpecError("\\C- followed by invalid key") -def parse_keys(key): - s = 0 - r = [] - while s < len(key): - k, s = _parse_key1(key, s) - r.extend(k) - return r + result = [ret], s + if meta: + result[0].insert(0, "\033") + return result def compile_keymap(keymap, empty=b""): diff --git a/Lib/_pyrepl/mypy.ini b/Lib/_pyrepl/mypy.ini index ecd03094dbf538..395f5945ab740b 100644 --- a/Lib/_pyrepl/mypy.ini +++ b/Lib/_pyrepl/mypy.ini @@ -10,18 +10,15 @@ platform = linux pretty = True # Enable most stricter settings -enable_error_code = ignore-without-code +enable_error_code = ignore-without-code,redundant-expr strict = True # Various stricter settings that we can't yet enable # Try to enable these in the following order: -disallow_any_generics = False disallow_untyped_calls = False disallow_untyped_defs = False check_untyped_defs = False -disable_error_code = return - # Various internal modules that typeshed deliberately doesn't have stubs for: [mypy-_abc.*,_opcode.*,_overlapped.*,_testcapi.*,_testinternalcapi.*,test.*] ignore_missing_imports = True diff --git a/Lib/_pyrepl/pager.py b/Lib/_pyrepl/pager.py index af0409c4523bc2..1ac733ed3573a4 100644 --- a/Lib/_pyrepl/pager.py +++ b/Lib/_pyrepl/pager.py @@ -35,7 +35,7 @@ def get_pager() -> Pager: if os.environ.get('TERM') in ('dumb', 'emacs'): return plain_pager if sys.platform == 'win32': - return lambda text,
title='': tempfilepager(plain(text), 'more <') + return lambda text, title='': tempfile_pager(plain(text), 'more <') if hasattr(os, 'system') and os.system('(less) 2>/dev/null') == 0: return lambda text, title='': pipe_pager(text, 'less', title) @@ -76,10 +76,14 @@ def tty_pager(text: str, title: str = '') -> None: fd = sys.stdin.fileno() old = termios.tcgetattr(fd) tty.setcbreak(fd) - getchar = lambda: sys.stdin.read(1) has_tty = True + + def getchar() -> str: + return sys.stdin.read(1) + except (ImportError, AttributeError, io.UnsupportedOperation): - getchar = lambda: sys.stdin.readline()[:-1][:1] + def getchar() -> str: + return sys.stdin.readline()[:-1][:1] try: try: diff --git a/Lib/_pyrepl/reader.py b/Lib/_pyrepl/reader.py index d15a150180811d..3e6c3f3439daaa 100644 --- a/Lib/_pyrepl/reader.py +++ b/Lib/_pyrepl/reader.py @@ -28,7 +28,7 @@ from . import commands, console, input -from .utils import ANSI_ESCAPE_SEQUENCE, wlen +from .utils import ANSI_ESCAPE_SEQUENCE, wlen, str_width from .trace import trace @@ -41,7 +41,7 @@ def disp_str(buffer: str) -> tuple[str, list[int]]: """disp_str(buffer:string) -> (string, [int]) - Return the string that should be the printed represenation of + Return the string that should be the printed representation of |buffer| and a list detailing where the characters of |buffer| get used up. E.g.: @@ -52,11 +52,17 @@ def disp_str(buffer: str) -> tuple[str, list[int]]: b: list[int] = [] s: list[str] = [] for c in buffer: - if unicodedata.category(c).startswith("C"): + if ord(c) < 128: + s.append(c) + b.append(1) + elif unicodedata.category(c).startswith("C"): c = r"\u%04x" % ord(c) - s.append(c) - b.append(wlen(c)) - b.extend([0] * (len(c) - 1)) + s.append(c) + b.append(str_width(c)) + b.extend([0] * (len(c) - 1)) + else: + s.append(c) + b.append(str_width(c)) return "".join(s), b @@ -129,6 +135,7 @@ def make_default_commands() -> dict[CommandName, type[Command]]: ("\\\\", "self-insert"), (r"\x1b[200~", "enable_bracketed_paste"), (r"\x1b[201~", "disable_bracketed_paste"), + (r"\x03", "ctrl-c"), ] + [(c, "self-insert") for c in map(chr, range(32, 127)) if c != "\\"] + [(c, "self-insert") for c in map(chr, range(128, 256)) if c.isalpha()] @@ -136,7 +143,9 @@ def make_default_commands() -> dict[CommandName, type[Command]]: (r"\<up>", "up"), (r"\<down>", "down"), (r"\<left>", "left"), + (r"\C-\<left>", "backward-word"), (r"\<right>", "right"), + (r"\C-\<right>", "forward-word"), (r"\<delete>", "delete"), (r"\<backspace>", "backspace"), (r"\M-\<backspace>", "backward-kill-word"), @@ -221,17 +230,28 @@ class Reader: dirty: bool = False finished: bool = False paste_mode: bool = False - was_paste_mode_activated: bool = False + in_bracketed_paste: bool = False commands: dict[str, type[Command]] = field(default_factory=make_default_commands) last_command: type[Command] | None = None syntax_table: dict[str, int] = field(default_factory=make_default_syntax_table) - msg_at_bottom: bool = True keymap: tuple[tuple[str, str], ...]
= () input_trans: input.KeymapTranslator = field(init=False) input_trans_stack: list[input.KeymapTranslator] = field(default_factory=list) + screen: list[str] = field(default_factory=list) screeninfo: list[tuple[int, list[int]]] = field(init=False) cxy: tuple[int, int] = field(init=False) lxy: tuple[int, int] = field(init=False) + scheduled_commands: list[str] = field(default_factory=list) + can_colorize: bool = False + + ## cached metadata to speed up screen refreshes + last_refresh_in_bracketed_paste: bool = False + last_refresh_screen: list[str] = field(default_factory=list) + last_refresh_screeninfo: list[tuple[int, list[int]]] = field(init=False) + last_refresh_line_end_offsets: list[int] = field(default_factory=list) + last_refresh_pos: int = field(init=False) + last_refresh_cxy: tuple[int, int] = field(init=False) + last_refresh_dimensions: tuple[int, int] = field(init=False) def __post_init__(self) -> None: # Enable the use of `insert` without a `prepare` call - necessary to @@ -241,34 +261,77 @@ def __post_init__(self) -> None: self.input_trans = input.KeymapTranslator( self.keymap, invalid_cls="invalid-key", character_cls="self-insert" ) - self.screeninfo = [(0, [0])] + self.screeninfo = [(0, [])] self.cxy = self.pos2xy() self.lxy = (self.pos, 0) + self.can_colorize = can_colorize() + + self.last_refresh_screeninfo = self.screeninfo + self.last_refresh_pos = self.pos + self.last_refresh_cxy = self.cxy + self.last_refresh_dimensions = (0, 0) def collect_keymap(self) -> tuple[tuple[KeySpec, CommandName], ...]: return default_keymap def calc_screen(self) -> list[str]: - """The purpose of this method is to translate changes in - self.buffer into changes in self.screen. Currently it rips - everything down and starts from scratch, which whilst not - especially efficient is certainly simple(r). - """ - lines = self.get_unicode().split("\n") - screen: list[str] = [] - screeninfo: list[tuple[int, list[int]]] = [] + """Translate changes in self.buffer into changes in self.console.screen.""" + # Since the last call to calc_screen: + # screen and screeninfo may differ due to a completion menu being shown + # pos and cxy may differ due to edits, cursor movements, or completion menus + + # Lines that are above both the old and new cursor position can't have changed, + # unless the terminal has been resized (which might cause reflowing) or we've + # entered or left paste mode (which changes prompts, causing reflowing). 
+ dimensions = self.console.width, self.console.height + dimensions_changed = dimensions != self.last_refresh_dimensions + paste_changed = self.in_bracketed_paste != self.last_refresh_in_bracketed_paste + cache_valid = not (dimensions_changed or paste_changed) + + num_common_lines = 0 + offset = 0 + if cache_valid: + earliest_common_pos = min(self.pos, self.last_refresh_pos) + + num_common_lines = len(self.last_refresh_line_end_offsets) + while num_common_lines > 0: + offset = self.last_refresh_line_end_offsets[num_common_lines - 1] + if earliest_common_pos > offset: + break + num_common_lines -= 1 + else: + offset = 0 + pos = self.pos - for ln, line in enumerate(lines): + pos -= offset + + screen = self.last_refresh_screen + del screen[num_common_lines:] + + screeninfo = self.last_refresh_screeninfo + del screeninfo[num_common_lines:] + + last_refresh_line_end_offsets = self.last_refresh_line_end_offsets + del last_refresh_line_end_offsets[num_common_lines:] + + lines = "".join(self.buffer[offset:]).split("\n") + cursor_found = False + lines_beyond_cursor = 0 + for ln, line in enumerate(lines, num_common_lines): ll = len(line) if 0 <= pos <= ll: - if self.msg and not self.msg_at_bottom: - for mline in self.msg.split("\n"): - screen.append(mline) - screeninfo.append((0, [])) self.lxy = pos, ln + cursor_found = True + elif cursor_found: + lines_beyond_cursor += 1 + if lines_beyond_cursor > self.console.height: + # No need to keep formatting lines. + # The console can't show them. + break prompt = self.get_prompt(ln, ll >= pos >= 0) while "\n" in prompt: pre_prompt, _, prompt = prompt.partition("\n") + last_refresh_line_end_offsets.append(offset) screen.append(pre_prompt) screeninfo.append((0, [])) pos -= ll + 1 @@ -276,10 +339,13 @@ def calc_screen(self) -> list[str]: l, l2 = disp_str(line) wrapcount = (wlen(l) + lp) // self.console.width if wrapcount == 0: + offset += ll + 1 # Takes all of the line plus the newline + last_refresh_line_end_offsets.append(offset) screen.append(prompt + l) screeninfo.append((lp, l2)) else: - for i in range(wrapcount + 1): + i = 0 + while l: prelen = lp if i == 0 else 0 index_to_wrap_before = 0 column = 0 @@ -289,21 +355,37 @@ def calc_screen(self) -> list[str]: index_to_wrap_before += 1 column += character_width pre = prompt if i == 0 else "" - post = "\\" if i != wrapcount else "" - after = [1] if i != wrapcount else [] + if len(l) > index_to_wrap_before: + offset += index_to_wrap_before + post = "\\" + after = [1] + else: + offset += index_to_wrap_before + 1 # Takes the newline + post = "" + after = [] + last_refresh_line_end_offsets.append(offset) screen.append(pre + l[:index_to_wrap_before] + post) screeninfo.append((prelen, l2[:index_to_wrap_before] + after)) l = l[index_to_wrap_before:] l2 = l2[index_to_wrap_before:] + i += 1 self.screeninfo = screeninfo self.cxy = self.pos2xy() - if self.msg and self.msg_at_bottom: + if self.msg: for mline in self.msg.split("\n"): screen.append(mline) screeninfo.append((0, [])) + + self.last_refresh_in_bracketed_paste = self.in_bracketed_paste + self.last_refresh_screen = screen.copy() + self.last_refresh_screeninfo = screeninfo.copy() + self.last_refresh_pos = self.pos + self.last_refresh_cxy = self.cxy + self.last_refresh_dimensions = dimensions return screen - def process_prompt(self, prompt: str) -> tuple[str, int]: + @staticmethod + def process_prompt(prompt: str) -> tuple[str, int]: """Process the prompt. This means calculate the length of the prompt. 
The character \x01 @@ -315,6 +397,11 @@ def process_prompt(self, prompt: str) -> tuple[str, int]: # sequences if they were not explicitly within \x01...\x02. # They are CSI (or ANSI) sequences ( ESC [ ... LETTER ) + # wlen from utils already excludes ANSI_ESCAPE_SEQUENCE chars, + # which breaks the logic below so we redefine it here. + def wlen(s: str) -> int: + return sum(str_width(i) for i in s) + out_prompt = "" l = wlen(prompt) pos = 0 @@ -407,27 +494,26 @@ def get_arg(self, default: int = 1) -> int: """ if self.arg is None: return default - else: - return self.arg + return self.arg def get_prompt(self, lineno: int, cursor_on_line: bool) -> str: """Return what should be in the left-hand margin for line `lineno'.""" if self.arg is not None and cursor_on_line: - prompt = "(arg: %s) " % self.arg + prompt = f"(arg: {self.arg}) " elif self.paste_mode: prompt = "(paste) " elif "\n" in self.buffer: if lineno == 0: prompt = self.ps2 - elif lineno == self.buffer.count("\n"): + elif self.ps4 and lineno == self.buffer.count("\n"): prompt = self.ps4 else: prompt = self.ps3 else: prompt = self.ps1 - if can_colorize(): + if self.can_colorize: prompt = f"{ANSIColors.BOLD_MAGENTA}{prompt}{ANSIColors.RESET}" return prompt @@ -480,12 +566,12 @@ def pos2xy(self) -> tuple[int, int]: offset = l - 1 if in_wrapped_line else l # need to remove backslash if offset >= pos: break + + if p + sum(l2) >= self.console.width: + pos -= l - 1 # -1 cause backslash is not in buffer else: - if p + sum(l2) >= self.console.width: - pos -= l - 1 # -1 cause backslash is not in buffer - else: - pos -= l + 1 # +1 cause newline is in buffer - y += 1 + pos -= l + 1 # +1 cause newline is in buffer + y += 1 return p + sum(l2[:pos]), y def insert(self, text: str | list[str]) -> None: @@ -523,6 +609,10 @@ def prepare(self) -> None: self.restore() raise + while self.scheduled_commands: + cmd = self.scheduled_commands.pop() + self.do_cmd((cmd, [])) + def last_command_is(self, cls: type) -> bool: if not self.last_command: return False @@ -543,7 +633,6 @@ def suspend(self) -> SimpleContextManager: for arg in ("msg", "ps1", "ps2", "ps3", "ps4", "paste_mode"): setattr(self, arg, prev_state[arg]) self.prepare() - pass def finish(self) -> None: """Called when a command signals that we're finished.""" @@ -560,9 +649,12 @@ def update_screen(self) -> None: def refresh(self) -> None: """Recalculate and refresh the screen.""" + if self.in_bracketed_paste and self.buffer and not self.buffer[-1] == "\n": + return + # this call sets up self.cxy, so call it first. 
- screen = self.calc_screen() - self.console.refresh(screen, self.cxy) + self.screen = self.calc_screen() + self.console.refresh(self.screen, self.cxy) self.dirty = False def do_cmd(self, cmd: tuple[str, list[str]]) -> None: @@ -606,7 +698,15 @@ def handle1(self, block: bool = True) -> bool: self.dirty = True while True: - event = self.console.get_event(block) + input_hook = self.console.input_hook + if input_hook: + input_hook() + # We use the same timeout as in readline.c: 100ms + while not self.console.wait(100): + input_hook() + event = self.console.get_event(block=False) + else: + event = self.console.get_event(block) if not event: # can only happen if we're not blocking return False diff --git a/Lib/_pyrepl/readline.py b/Lib/_pyrepl/readline.py index d28a7f3779f302..b10d0c66e4f813 100644 --- a/Lib/_pyrepl/readline.py +++ b/Lib/_pyrepl/readline.py @@ -28,16 +28,24 @@ from __future__ import annotations +import warnings from dataclasses import dataclass, field import os -import readline from site import gethistoryfile # type: ignore[attr-defined] import sys +from rlcompleter import Completer as RLCompleter from . import commands, historical_reader from .completing_reader import CompletingReader -from .unix_console import UnixConsole, _error +from .console import Console as ConsoleType + +Console: type[ConsoleType] +_error: tuple[type[Exception], ...] | type[Exception] +try: + from .unix_console import UnixConsole as Console, _error +except ImportError: + from .windows_console import WindowsConsole as Console, _error ENCODING = sys.getdefaultencoding() or "latin1" @@ -48,6 +56,9 @@ from .types import Callback, Completer, KeySpec, CommandName +MoreLinesCallable = Callable[[str], bool] + + __all__ = [ "add_history", "clear_history", @@ -81,7 +92,7 @@ @dataclass class ReadlineConfig: - readline_completer: Completer | None = readline.get_completer() + readline_completer: Completer | None = RLCompleter().complete completer_delims: frozenset[str] = frozenset(" \t\n`~!@#$%^&*()-=+[{]}\\|;:'\",<>/?") @@ -94,7 +105,8 @@ class ReadlineAlikeReader(historical_reader.HistoricalReader, CompletingReader): # Instance fields config: ReadlineConfig - more_lines: Callable[[str], bool] | None = None + more_lines: MoreLinesCallable | None = None + last_used_indentation: str | None = None def __post_init__(self) -> None: super().__post_init__() @@ -153,6 +165,11 @@ def get_trimmed_history(self, maxlength: int) -> list[str]: cut = 0 return self.history[cut:] + def update_last_used_indentation(self) -> None: + indentation = _get_first_indentation(self.buffer) + if indentation is not None: + self.last_used_indentation = indentation + # --- simplified support for reading multiline Python statements --- def collect_keymap(self) -> tuple[tuple[KeySpec, CommandName], ...]: @@ -207,25 +224,84 @@ def _get_previous_line_indent(buffer: list[str], pos: int) -> tuple[int, int | N return prevlinestart, indent +def _get_first_indentation(buffer: list[str]) -> str | None: + indented_line_start = None + for i in range(len(buffer)): + if (i < len(buffer) - 1 + and buffer[i] == "\n" + and buffer[i + 1] in " \t" + ): + indented_line_start = i + 1 + elif indented_line_start is not None and buffer[i] not in " \t\n": + return ''.join(buffer[indented_line_start : i]) + return None + + +def _should_auto_indent(buffer: list[str], pos: int) -> bool: + # check if last character before "pos" is a colon, ignoring + # whitespaces and comments. 
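    # A few illustrative inputs (buffer shown as a plain string, pos at its end):
    #   "if x == 1:"             -> True   (last significant character is ":")
    #   "if x == 1:  # comment"  -> True   (the trailing comment is skipped)
    #   "x = 1"                  -> False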
+ last_char = None + while pos > 0: + pos -= 1 + if last_char is None: + if buffer[pos] not in " \t\n": # ignore whitespaces + last_char = buffer[pos] + else: + # even if we found a non-whitespace character before + # original pos, we keep going back until newline is reached + # to make sure we ignore comments + if buffer[pos] == "\n": + break + if buffer[pos] == "#": + last_char = None + return last_char == ":" + + class maybe_accept(commands.Command): def do(self) -> None: r: ReadlineAlikeReader r = self.reader # type: ignore[assignment] r.dirty = True # this is needed to hide the completion menu, if visible - # + + if self.reader.in_bracketed_paste: + r.insert("\n") + return + # if there are already several lines and the cursor # is not on the last one, always insert a new \n. text = r.get_unicode() + if "\n" in r.buffer[r.pos :] or ( r.more_lines is not None and r.more_lines(text) ): - # + def _newline_before_pos(): + before_idx = r.pos - 1 + while before_idx > 0 and text[before_idx].isspace(): + before_idx -= 1 + return text[before_idx : r.pos].count("\n") > 0 + + # if there's already a new line before the cursor then + # even if the cursor is followed by whitespace, we assume + # the user is trying to terminate the block + if _newline_before_pos() and text[r.pos:].isspace(): + self.finish = True + return + # auto-indent the next line like the previous line prevlinestart, indent = _get_previous_line_indent(r.buffer, r.pos) r.insert("\n") - if not self.reader.paste_mode and indent: - for i in range(prevlinestart, prevlinestart + indent): - r.insert(r.buffer[i]) + if not self.reader.paste_mode: + if indent: + for i in range(prevlinestart, prevlinestart + indent): + r.insert(r.buffer[i]) + r.update_last_used_indentation() + if _should_auto_indent(r.buffer, r.pos): + if r.last_used_indentation is not None: + indentation = r.last_used_indentation + else: + # default + indentation = " " * 4 + r.insert(indentation) elif not self.reader.paste_mode: self.finish = True else: @@ -274,7 +350,7 @@ def __post_init__(self) -> None: def get_reader(self) -> ReadlineAlikeReader: if self.reader is None: - console = UnixConsole(self.f_in, self.f_out, encoding=ENCODING) + console = Console(self.f_in, self.f_out, encoding=ENCODING) self.reader = ReadlineAlikeReader(console=console, config=self.config) return self.reader @@ -287,7 +363,7 @@ def input(self, prompt: object = "") -> str: reader.ps1 = str(prompt) return reader.readline(startup_hook=self.startup_hook) - def multiline_input(self, more_lines, ps1, ps2): + def multiline_input(self, more_lines: MoreLinesCallable, ps1: str, ps2: str) -> str: """Read an input on possibly multiple lines, asking for more lines as long as 'more_lines(unicodetext)' returns an object whose boolean value is true. 
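For orientation, a minimal sketch of a call site after this signature change (the paste-mode flag is no longer returned); `more_lines` is any callable deciding whether another line is needed, and the heuristic used here is only an assumption for the sketch:

    from _pyrepl.readline import multiline_input

    def more_lines(text: str) -> bool:
        # stand-in heuristic for the sketch; the real REPL asks the code compiler
        return text.rstrip().endswith(":")

    statement = multiline_input(more_lines, ">>> ", "... ")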
@@ -296,13 +372,15 @@ def multiline_input(self, more_lines, ps1, ps2): saved = reader.more_lines try: reader.more_lines = more_lines - reader.ps1 = reader.ps2 = ps1 - reader.ps3 = reader.ps4 = ps2 - return reader.readline(), reader.was_paste_mode_activated + reader.ps1 = ps1 + reader.ps2 = ps1 + reader.ps3 = ps2 + reader.ps4 = "" + with warnings.catch_warnings(action="ignore"): + return reader.readline() finally: reader.more_lines = saved reader.paste_mode = False - reader.was_paste_mode_activated = False def parse_and_bind(self, string: str) -> None: pass # XXX we don't support parsing GNU-readline-style init files diff --git a/Lib/_pyrepl/simple_interact.py b/Lib/_pyrepl/simple_interact.py index 31b2097a78a226..2e5698eb131684 100644 --- a/Lib/_pyrepl/simple_interact.py +++ b/Lib/_pyrepl/simple_interact.py @@ -25,16 +25,20 @@ from __future__ import annotations -import _colorize # type: ignore[import-not-found] import _sitebuiltins import linecache import sys import code from types import ModuleType +from .console import InteractiveColoredConsole from .readline import _get_reader, multiline_input -from .unix_console import _error +_error: tuple[type[Exception], ...] | type[Exception] +try: + from .unix_console import _error +except ModuleNotFoundError: + from .windows_console import _error def check() -> str: """Returns the error message if there is a problem initializing the state.""" @@ -56,37 +60,34 @@ def _strip_final_indent(text: str) -> str: return text +def _clear_screen(): + reader = _get_reader() + reader.scheduled_commands.append("clear_screen") + + REPL_COMMANDS = { "exit": _sitebuiltins.Quitter('exit', ''), "quit": _sitebuiltins.Quitter('quit' ,''), "copyright": _sitebuiltins._Printer('copyright', sys.copyright), "help": "help", + "clear": _clear_screen, } -class InteractiveColoredConsole(code.InteractiveConsole): - def __init__( - self, - locals: dict[str, object] | None = None, - filename: str = "", - *, - local_exit: bool = False, - ) -> None: - super().__init__(locals=locals, filename=filename, local_exit=local_exit) # type: ignore[call-arg] - self.can_colorize = _colorize.can_colorize() - - def showtraceback(self): - super().showtraceback(colorize=self.can_colorize) - def run_multiline_interactive_console( - mainmodule: ModuleType | None= None, future_flags: int = 0 + mainmodule: ModuleType | None = None, + future_flags: int = 0, + console: code.InteractiveConsole | None = None, ) -> None: import __main__ from .readline import _setup _setup() mainmodule = mainmodule or __main__ - console = InteractiveColoredConsole(mainmodule.__dict__, filename="") + if console is None: + console = InteractiveColoredConsole( + mainmodule.__dict__, filename="" + ) if future_flags: console.compile.compiler.flags |= future_flags @@ -135,7 +136,7 @@ def more_lines(unicodetext: str) -> bool: ps1 = getattr(sys, "ps1", ">>> ") ps2 = getattr(sys, "ps2", "... 
") try: - statement, contains_pasted_code = multiline_input(more_lines, ps1, ps2) + statement = multiline_input(more_lines, ps1, ps2) except EOFError: break @@ -144,14 +145,11 @@ def more_lines(unicodetext: str) -> bool: input_name = f"" linecache._register_code(input_name, statement, "") # type: ignore[attr-defined] - symbol = "single" if not contains_pasted_code else "exec" - more = console.push(_strip_final_indent(statement), filename=input_name, _symbol=symbol) # type: ignore[call-arg] - if contains_pasted_code and more: - more = console.push(_strip_final_indent(statement), filename=input_name, _symbol="single") # type: ignore[call-arg] + more = console.push(_strip_final_indent(statement), filename=input_name, _symbol="single") # type: ignore[call-arg] assert not more input_n += 1 except KeyboardInterrupt: - console.write("\nKeyboardInterrupt\n") + console.write("KeyboardInterrupt\n") console.resetbuffer() except MemoryError: console.write("\nMemoryError\n") diff --git a/Lib/_pyrepl/unix_console.py b/Lib/_pyrepl/unix_console.py index 605318c82ae2ea..f1a6b84adfb671 100644 --- a/Lib/_pyrepl/unix_console.py +++ b/Lib/_pyrepl/unix_console.py @@ -40,9 +40,13 @@ from .utils import wlen +TYPE_CHECKING = False + # types -if False: - from typing import IO +if TYPE_CHECKING: + from typing import IO, Literal, overload +else: + overload = lambda func: None class InvalidTerminal(RuntimeError): @@ -114,9 +118,12 @@ def __init__(self): def register(self, fd, flag): self.fd = fd - - def poll(self): # note: a 'timeout' argument would be *milliseconds* - r, w, e = select.select([self.fd], [], []) + # note: The 'timeout' argument is received as *milliseconds* + def poll(self, timeout: float | None = None) -> list[int]: + if timeout is None: + r, w, e = select.select([self.fd], [], []) + else: + r, w, e = select.select([self.fd], [], [], timeout/1000) return r poll = MinimalPoll # type: ignore[assignment] @@ -139,25 +146,22 @@ def __init__( - term (str): Terminal name. - encoding (str): Encoding to use for I/O operations. """ - - self.encoding = encoding or sys.getdefaultencoding() - - if isinstance(f_in, int): - self.input_fd = f_in - else: - self.input_fd = f_in.fileno() - - if isinstance(f_out, int): - self.output_fd = f_out - else: - self.output_fd = f_out.fileno() + super().__init__(f_in, f_out, term, encoding) self.pollob = poll() self.pollob.register(self.input_fd, select.POLLIN) + self.input_buffer = b"" + self.input_buffer_pos = 0 curses.setupterm(term or None, self.output_fd) self.term = term - def _my_getstr(cap, optional=0): + @overload + def _my_getstr(cap: str, optional: Literal[False] = False) -> bytes: ... + + @overload + def _my_getstr(cap: str, optional: bool) -> bytes | None: ... + + def _my_getstr(cap: str, optional: bool = False) -> bytes | None: r = curses.tigetstr(cap) if not optional and r is None: raise InvalidTerminal( @@ -195,6 +199,18 @@ def _my_getstr(cap, optional=0): self.event_queue = EventQueue(self.input_fd, self.encoding) self.cursor_visible = 1 + def __read(self, n: int) -> bytes: + if not self.input_buffer or self.input_buffer_pos >= len(self.input_buffer): + self.input_buffer = os.read(self.input_fd, 10000) + + ret = self.input_buffer[self.input_buffer_pos : self.input_buffer_pos + n] + self.input_buffer_pos += len(ret) + if self.input_buffer_pos >= len(self.input_buffer): + self.input_buffer = b"" + self.input_buffer_pos = 0 + return ret + + def change_encoding(self, encoding: str) -> None: """ Change the encoding used for I/O operations. 
@@ -283,7 +299,7 @@ def refresh(self, screen, c_xy): self.__show_cursor() - self.screen = screen + self.screen = screen.copy() self.move_cursor(cx, cy) self.flushoutput() @@ -371,7 +387,7 @@ def get_event(self, block: bool = True) -> Event | None: while self.event_queue.empty(): while True: try: - self.push_char(os.read(self.input_fd, 1)) + self.push_char(self.__read(1)) except OSError as err: if err.errno == errno.EINTR: if not self.event_queue.empty(): @@ -386,11 +402,11 @@ def get_event(self, block: bool = True) -> Event | None: break return self.event_queue.get() - def wait(self): + def wait(self, timeout: float | None = None) -> bool: """ Wait for events on the console. """ - self.pollob.poll() + return bool(self.pollob.poll(timeout)) def set_cursor_vis(self, visible): """ @@ -489,7 +505,7 @@ def getpending(self): e.raw += e.raw amount = struct.unpack("i", ioctl(self.input_fd, FIONREAD, b"\0\0\0\0"))[0] - raw = os.read(self.input_fd, amount) + raw = self.__read(amount) data = str(raw, self.encoding, "replace") e.data += data e.raw += raw @@ -512,7 +528,7 @@ def getpending(self): e.raw += e.raw amount = 10000 - raw = os.read(self.input_fd, amount) + raw = self.__read(amount) data = str(raw, self.encoding, "replace") e.data += data e.raw += raw @@ -528,6 +544,15 @@ def clear(self): self.__posxy = 0, 0 self.screen = [] + @property + def input_hook(self): + try: + import posix + except ImportError: + return None + if posix._is_inputhook_installed(): + return posix._inputhook + def __enable_bracketed_paste(self) -> None: os.write(self.output_fd, b"\x1b[?2004h") @@ -582,14 +607,19 @@ def __write_changed_line(self, y, oldline, newline, px_coord): px_pos = 0 j = 0 for c in oldline: - if j >= px_coord: break + if j >= px_coord: + break j += wlen(c) px_pos += 1 # reuse the oldline as much as possible, but stop as soon as we # encounter an ESCAPE, because it might be the start of an escape # sequene - while x_coord < minlen and oldline[x_pos] == newline[x_pos] and newline[x_pos] != "\x1b": + while ( + x_coord < minlen + and oldline[x_pos] == newline[x_pos] + and newline[x_pos] != "\x1b" + ): x_coord += wlen(newline[x_pos]) x_pos += 1 @@ -609,7 +639,11 @@ def __write_changed_line(self, y, oldline, newline, px_coord): self.__posxy = x_coord + character_width, y # if it's a single character change in the middle of the line - elif x_coord < minlen and oldline[x_pos + 1 :] == newline[x_pos + 1 :] and wlen(oldline[x_pos]) == wlen(newline[x_pos]): + elif ( + x_coord < minlen + and oldline[x_pos + 1 :] == newline[x_pos + 1 :] + and wlen(oldline[x_pos]) == wlen(newline[x_pos]) + ): character_width = wlen(newline[x_pos]) self.__move(x_coord, y) self.__write(newline[x_pos]) @@ -672,18 +706,18 @@ def __move_y_cuu_cud(self, y): elif dy < 0: self.__write_code(self._cuu, -dy) - def __move_x_hpa(self, x): + def __move_x_hpa(self, x: int) -> None: if x != self.__posxy[0]: self.__write_code(self._hpa, x) - def __move_x_cub1_cuf1(self, x): + def __move_x_cub1_cuf1(self, x: int) -> None: dx = x - self.__posxy[0] if dx > 0: self.__write_code(self._cuf1 * dx) elif dx < 0: self.__write_code(self._cub1 * (-dx)) - def __move_x_cub_cuf(self, x): + def __move_x_cub_cuf(self, x: int) -> None: dx = x - self.__posxy[0] if dx > 0: self.__write_code(self._cuf, dx) diff --git a/Lib/_pyrepl/utils.py b/Lib/_pyrepl/utils.py index cd1df7c49a216d..20dbb1f7e17229 100644 --- a/Lib/_pyrepl/utils.py +++ b/Lib/_pyrepl/utils.py @@ -1,10 +1,14 @@ import re import unicodedata +import functools ANSI_ESCAPE_SEQUENCE = re.compile(r"\x1b\[[ 
-@]*[A-~]") +@functools.cache def str_width(c: str) -> int: + if ord(c) < 128: + return 1 w = unicodedata.east_asian_width(c) if w in ('N', 'Na', 'H', 'A'): return 1 @@ -12,7 +16,9 @@ def str_width(c: str) -> int: def wlen(s: str) -> int: + if len(s) == 1: + return str_width(s) length = sum(str_width(i) for i in s) - # remove lengths of any escape sequences - return length - sum(len(i) for i in ANSI_ESCAPE_SEQUENCE.findall(s)) + sequence = ANSI_ESCAPE_SEQUENCE.findall(s) + return length - sum(len(i) for i in sequence) diff --git a/Lib/_pyrepl/windows_console.py b/Lib/_pyrepl/windows_console.py new file mode 100644 index 00000000000000..f691ca3fbb07b8 --- /dev/null +++ b/Lib/_pyrepl/windows_console.py @@ -0,0 +1,605 @@ +# Copyright 2000-2004 Michael Hudson-Doyle +# +# All Rights Reserved +# +# +# Permission to use, copy, modify, and distribute this software and +# its documentation for any purpose is hereby granted without fee, +# provided that the above copyright notice appear in all copies and +# that both that copyright notice and this permission notice appear in +# supporting documentation. +# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +from __future__ import annotations + +import io +from multiprocessing import Value +import os +import sys +import time +import msvcrt + +from abc import ABC, abstractmethod +from collections import deque +from dataclasses import dataclass, field +import ctypes +from ctypes.wintypes import ( + _COORD, + WORD, + SMALL_RECT, + BOOL, + HANDLE, + CHAR, + DWORD, + WCHAR, + SHORT, +) +from ctypes import Structure, POINTER, Union +from .console import Event, Console +from .trace import trace +from .utils import wlen + +try: + from ctypes import GetLastError, WinDLL, windll, WinError # type: ignore[attr-defined] +except: + # Keep MyPy happy off Windows + from ctypes import CDLL as WinDLL, cdll as windll + + def GetLastError() -> int: + return 42 + + class WinError(OSError): # type: ignore[no-redef] + def __init__(self, err: int | None, descr: str | None = None) -> None: + self.err = err + self.descr = descr + + +TYPE_CHECKING = False + +if TYPE_CHECKING: + from typing import IO + +VK_MAP: dict[int, str] = { + 0x23: "end", # VK_END + 0x24: "home", # VK_HOME + 0x25: "left", # VK_LEFT + 0x26: "up", # VK_UP + 0x27: "right", # VK_RIGHT + 0x28: "down", # VK_DOWN + 0x2E: "delete", # VK_DELETE + 0x70: "f1", # VK_F1 + 0x71: "f2", # VK_F2 + 0x72: "f3", # VK_F3 + 0x73: "f4", # VK_F4 + 0x74: "f5", # VK_F5 + 0x75: "f6", # VK_F6 + 0x76: "f7", # VK_F7 + 0x77: "f8", # VK_F8 + 0x78: "f9", # VK_F9 + 0x79: "f10", # VK_F10 + 0x7A: "f11", # VK_F11 + 0x7B: "f12", # VK_F12 + 0x7C: "f13", # VK_F13 + 0x7D: "f14", # VK_F14 + 0x7E: "f15", # VK_F15 + 0x7F: "f16", # VK_F16 + 0x79: "f17", # VK_F17 + 0x80: "f18", # VK_F18 + 0x81: "f19", # VK_F19 + 0x82: "f20", # VK_F20 +} + +# Console escape codes: https://learn.microsoft.com/en-us/windows/console/console-virtual-terminal-sequences +ERASE_IN_LINE = "\x1b[K" +MOVE_LEFT = "\x1b[{}D" +MOVE_RIGHT = "\x1b[{}C" +MOVE_UP = "\x1b[{}A" +MOVE_DOWN = "\x1b[{}B" +CLEAR = "\x1b[H\x1b[J" + + +class _error(Exception): + pass + + 
+class WindowsConsole(Console): + def __init__( + self, + f_in: IO[bytes] | int = 0, + f_out: IO[bytes] | int = 1, + term: str = "", + encoding: str = "", + ): + super().__init__(f_in, f_out, term, encoding) + + SetConsoleMode( + OutHandle, + ENABLE_WRAP_AT_EOL_OUTPUT + | ENABLE_PROCESSED_OUTPUT + | ENABLE_VIRTUAL_TERMINAL_PROCESSING, + ) + self.screen: list[str] = [] + self.width = 80 + self.height = 25 + self.__offset = 0 + self.event_queue: deque[Event] = deque() + try: + self.out = io._WindowsConsoleIO(self.output_fd, "w") # type: ignore[attr-defined] + except ValueError: + # Console I/O is redirected, fallback... + self.out = None + + def refresh(self, screen: list[str], c_xy: tuple[int, int]) -> None: + """ + Refresh the console screen. + + Parameters: + - screen (list): List of strings representing the screen contents. + - c_xy (tuple): Cursor position (x, y) on the screen. + """ + cx, cy = c_xy + + while len(self.screen) < min(len(screen), self.height): + self._hide_cursor() + self._move_relative(0, len(self.screen) - 1) + self.__write("\n") + self.__posxy = 0, len(self.screen) + self.screen.append("") + + px, py = self.__posxy + old_offset = offset = self.__offset + height = self.height + + # we make sure the cursor is on the screen, and that we're + # using all of the screen if we can + if cy < offset: + offset = cy + elif cy >= offset + height: + offset = cy - height + 1 + scroll_lines = offset - old_offset + + # Scrolling the buffer as the current input is greater than the visible + # portion of the window. We need to scroll the visible portion and the + # entire history + self._scroll(scroll_lines, self._getscrollbacksize()) + self.__posxy = self.__posxy[0], self.__posxy[1] + scroll_lines + self.__offset += scroll_lines + + for i in range(scroll_lines): + self.screen.append("") + elif offset > 0 and len(screen) < offset + height: + offset = max(len(screen) - height, 0) + screen.append("") + + oldscr = self.screen[old_offset : old_offset + height] + newscr = screen[offset : offset + height] + + self.__offset = offset + + self._hide_cursor() + for ( + y, + oldline, + newline, + ) in zip(range(offset, offset + height), oldscr, newscr): + if oldline != newline: + self.__write_changed_line(y, oldline, newline, px) + + y = len(newscr) + while y < len(oldscr): + self._move_relative(0, y) + self.__posxy = 0, y + self._erase_to_end() + y += 1 + + self._show_cursor() + + self.screen = screen + self.move_cursor(cx, cy) + + @property + def input_hook(self): + try: + import nt + except ImportError: + return None + if nt._is_inputhook_installed(): + return nt._inputhook + + def __write_changed_line( + self, y: int, oldline: str, newline: str, px_coord: int + ) -> None: + # this is frustrating; there's no reason to test (say) + # self.dch1 inside the loop -- but alternative ways of + # structuring this function are equally painful (I'm trying to + # avoid writing code generators these days...) 
+ minlen = min(wlen(oldline), wlen(newline)) + x_pos = 0 + x_coord = 0 + + px_pos = 0 + j = 0 + for c in oldline: + if j >= px_coord: + break + j += wlen(c) + px_pos += 1 + + # reuse the oldline as much as possible, but stop as soon as we + # encounter an ESCAPE, because it might be the start of an escape + # sequene + while ( + x_coord < minlen + and oldline[x_pos] == newline[x_pos] + and newline[x_pos] != "\x1b" + ): + x_coord += wlen(newline[x_pos]) + x_pos += 1 + + self._hide_cursor() + self._move_relative(x_coord, y) + if wlen(oldline) > wlen(newline): + self._erase_to_end() + + self.__write(newline[x_pos:]) + if wlen(newline) == self.width: + # If we wrapped we want to start at the next line + self._move_relative(0, y + 1) + self.__posxy = 0, y + 1 + else: + self.__posxy = wlen(newline), y + + if "\x1b" in newline or y != self.__posxy[1]: + # ANSI escape characters are present, so we can't assume + # anything about the position of the cursor. Moving the cursor + # to the left margin should work to get to a known position. + self.move_cursor(0, y) + + def _scroll( + self, top: int, bottom: int, left: int | None = None, right: int | None = None + ) -> None: + scroll_rect = SMALL_RECT() + scroll_rect.Top = SHORT(top) + scroll_rect.Bottom = SHORT(bottom) + scroll_rect.Left = SHORT(0 if left is None else left) + scroll_rect.Right = SHORT( + self.getheightwidth()[1] - 1 if right is None else right + ) + destination_origin = _COORD() + fill_info = CHAR_INFO() + fill_info.UnicodeChar = " " + + if not ScrollConsoleScreenBuffer( + OutHandle, scroll_rect, None, destination_origin, fill_info + ): + raise WinError(GetLastError()) + + def _hide_cursor(self): + self.__write("\x1b[?25l") + + def _show_cursor(self): + self.__write("\x1b[?25h") + + def _enable_blinking(self): + self.__write("\x1b[?12h") + + def _disable_blinking(self): + self.__write("\x1b[?12l") + + def __write(self, text: str) -> None: + if self.out is not None: + self.out.write(text.encode(self.encoding, "replace")) + self.out.flush() + else: + os.write(self.output_fd, text.encode(self.encoding, "replace")) + + @property + def screen_xy(self) -> tuple[int, int]: + info = CONSOLE_SCREEN_BUFFER_INFO() + if not GetConsoleScreenBufferInfo(OutHandle, info): + raise WinError(GetLastError()) + return info.dwCursorPosition.X, info.dwCursorPosition.Y + + def _erase_to_end(self) -> None: + self.__write(ERASE_IN_LINE) + + def prepare(self) -> None: + trace("prepare") + self.screen = [] + self.height, self.width = self.getheightwidth() + + self.__posxy = 0, 0 + self.__gone_tall = 0 + self.__offset = 0 + + def restore(self) -> None: + pass + + def _move_relative(self, x: int, y: int) -> None: + """Moves relative to the current __posxy""" + dx = x - self.__posxy[0] + dy = y - self.__posxy[1] + if dx < 0: + self.__write(MOVE_LEFT.format(-dx)) + elif dx > 0: + self.__write(MOVE_RIGHT.format(dx)) + + if dy < 0: + self.__write(MOVE_UP.format(-dy)) + elif dy > 0: + self.__write(MOVE_DOWN.format(dy)) + + def move_cursor(self, x: int, y: int) -> None: + if x < 0 or y < 0: + raise ValueError(f"Bad cursor position {x}, {y}") + + if y < self.__offset or y >= self.__offset + self.height: + self.event_queue.insert(0, Event("scroll", "")) + else: + self._move_relative(x, y) + self.__posxy = x, y + + def set_cursor_vis(self, visible: bool) -> None: + if visible: + self._show_cursor() + else: + self._hide_cursor() + + def getheightwidth(self) -> tuple[int, int]: + """Return (height, width) where height and width are the height + and width of the terminal 
window in characters.""" + info = CONSOLE_SCREEN_BUFFER_INFO() + if not GetConsoleScreenBufferInfo(OutHandle, info): + raise WinError(GetLastError()) + return ( + info.srWindow.Bottom - info.srWindow.Top + 1, + info.srWindow.Right - info.srWindow.Left + 1, + ) + + def _getscrollbacksize(self) -> int: + info = CONSOLE_SCREEN_BUFFER_INFO() + if not GetConsoleScreenBufferInfo(OutHandle, info): + raise WinError(GetLastError()) + + return info.srWindow.Bottom # type: ignore[no-any-return] + + def _read_input(self) -> INPUT_RECORD | None: + rec = INPUT_RECORD() + read = DWORD() + if not ReadConsoleInput(InHandle, rec, 1, read): + raise WinError(GetLastError()) + + if read.value == 0: + return None + + return rec + + def get_event(self, block: bool = True) -> Event | None: + """Return an Event instance. Returns None if |block| is false + and there is no event pending, otherwise waits for the + completion of an event.""" + if self.event_queue: + return self.event_queue.pop() + + while True: + rec = self._read_input() + if rec is None: + if block: + continue + return None + + if rec.EventType == WINDOW_BUFFER_SIZE_EVENT: + return Event("resize", "") + + if rec.EventType != KEY_EVENT or not rec.Event.KeyEvent.bKeyDown: + # Only process keys and keydown events + if block: + continue + return None + + key = rec.Event.KeyEvent.uChar.UnicodeChar + + if rec.Event.KeyEvent.uChar.UnicodeChar == "\r": + # Make enter make unix-like + return Event(evt="key", data="\n", raw=b"\n") + elif rec.Event.KeyEvent.wVirtualKeyCode == 8: + # Turn backspace directly into the command + return Event( + evt="key", + data="backspace", + raw=rec.Event.KeyEvent.uChar.UnicodeChar, + ) + elif rec.Event.KeyEvent.uChar.UnicodeChar == "\x00": + # Handle special keys like arrow keys and translate them into the appropriate command + code = VK_MAP.get(rec.Event.KeyEvent.wVirtualKeyCode) + if code: + return Event( + evt="key", data=code, raw=rec.Event.KeyEvent.uChar.UnicodeChar + ) + if block: + continue + + return None + + return Event(evt="key", data=key, raw=rec.Event.KeyEvent.uChar.UnicodeChar) + + def push_char(self, char: int | bytes) -> None: + """ + Push a character to the console event queue. + """ + raise NotImplementedError("push_char not supported on Windows") + + def beep(self) -> None: + self.__write("\x07") + + def clear(self) -> None: + """Wipe the screen""" + self.__write(CLEAR) + self.__posxy = 0, 0 + self.screen = [""] + + def finish(self) -> None: + """Move the cursor to the end of the display and otherwise get + ready for end. XXX could be merged with restore? Hmm.""" + y = len(self.screen) - 1 + while y >= 0 and not self.screen[y]: + y -= 1 + self._move_relative(0, min(y, self.height + self.__offset - 1)) + self.__write("\r\n") + + def flushoutput(self) -> None: + """Flush all output to the screen (assuming there's some + buffering going on somewhere). 
+ + All output on Windows is unbuffered so this is a nop""" + pass + + def forgetinput(self) -> None: + """Forget all pending, but not yet processed input.""" + while self._read_input() is not None: + pass + + def getpending(self) -> Event: + """Return the characters that have been typed but not yet + processed.""" + return Event("key", "", b"") + + def wait(self, timeout: float | None) -> bool: + """Wait for an event.""" + # Poor man's Windows select loop + start_time = time.time() + while True: + if msvcrt.kbhit(): # type: ignore[attr-defined] + return True + if timeout and time.time() - start_time > timeout: + return False + time.sleep(0.01) + + def repaint(self) -> None: + raise NotImplementedError("No repaint support") + + +# Windows interop +class CONSOLE_SCREEN_BUFFER_INFO(Structure): + _fields_ = [ + ("dwSize", _COORD), + ("dwCursorPosition", _COORD), + ("wAttributes", WORD), + ("srWindow", SMALL_RECT), + ("dwMaximumWindowSize", _COORD), + ] + + +class CONSOLE_CURSOR_INFO(Structure): + _fields_ = [ + ("dwSize", DWORD), + ("bVisible", BOOL), + ] + + +class CHAR_INFO(Structure): + _fields_ = [ + ("UnicodeChar", WCHAR), + ("Attributes", WORD), + ] + + +class Char(Union): + _fields_ = [ + ("UnicodeChar", WCHAR), + ("Char", CHAR), + ] + + +class KeyEvent(ctypes.Structure): + _fields_ = [ + ("bKeyDown", BOOL), + ("wRepeatCount", WORD), + ("wVirtualKeyCode", WORD), + ("wVirtualScanCode", WORD), + ("uChar", Char), + ("dwControlKeyState", DWORD), + ] + + +class WindowsBufferSizeEvent(ctypes.Structure): + _fields_ = [("dwSize", _COORD)] + + +class ConsoleEvent(ctypes.Union): + _fields_ = [ + ("KeyEvent", KeyEvent), + ("WindowsBufferSizeEvent", WindowsBufferSizeEvent), + ] + + +class INPUT_RECORD(Structure): + _fields_ = [("EventType", WORD), ("Event", ConsoleEvent)] + + +KEY_EVENT = 0x01 +FOCUS_EVENT = 0x10 +MENU_EVENT = 0x08 +MOUSE_EVENT = 0x02 +WINDOW_BUFFER_SIZE_EVENT = 0x04 + +ENABLE_PROCESSED_OUTPUT = 0x01 +ENABLE_WRAP_AT_EOL_OUTPUT = 0x02 +ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x04 + +STD_INPUT_HANDLE = -10 +STD_OUTPUT_HANDLE = -11 + +if sys.platform == "win32": + _KERNEL32 = WinDLL("kernel32", use_last_error=True) + + GetStdHandle = windll.kernel32.GetStdHandle + GetStdHandle.argtypes = [DWORD] + GetStdHandle.restype = HANDLE + + GetConsoleScreenBufferInfo = _KERNEL32.GetConsoleScreenBufferInfo + GetConsoleScreenBufferInfo.argtypes = [ + HANDLE, + ctypes.POINTER(CONSOLE_SCREEN_BUFFER_INFO), + ] + GetConsoleScreenBufferInfo.restype = BOOL + + ScrollConsoleScreenBuffer = _KERNEL32.ScrollConsoleScreenBufferW + ScrollConsoleScreenBuffer.argtypes = [ + HANDLE, + POINTER(SMALL_RECT), + POINTER(SMALL_RECT), + _COORD, + POINTER(CHAR_INFO), + ] + ScrollConsoleScreenBuffer.restype = BOOL + + SetConsoleMode = _KERNEL32.SetConsoleMode + SetConsoleMode.argtypes = [HANDLE, DWORD] + SetConsoleMode.restype = BOOL + + ReadConsoleInput = _KERNEL32.ReadConsoleInputW + ReadConsoleInput.argtypes = [HANDLE, POINTER(INPUT_RECORD), DWORD, POINTER(DWORD)] + ReadConsoleInput.restype = BOOL + + OutHandle = GetStdHandle(STD_OUTPUT_HANDLE) + InHandle = GetStdHandle(STD_INPUT_HANDLE) +else: + + def _win_only(*args, **kwargs): + raise NotImplementedError("Windows only") + + GetStdHandle = _win_only + GetConsoleScreenBufferInfo = _win_only + ScrollConsoleScreenBuffer = _win_only + SetConsoleMode = _win_only + ReadConsoleInput = _win_only + OutHandle = 0 + InHandle = 0 diff --git a/Lib/asyncio/__main__.py b/Lib/asyncio/__main__.py index cbc1d7c93ef76f..91fff9aaee337b 100644 --- a/Lib/asyncio/__main__.py +++ 
b/Lib/asyncio/__main__.py @@ -1,42 +1,49 @@ import ast import asyncio -import code import concurrent.futures import inspect +import os import site import sys import threading import types import warnings +from _colorize import can_colorize, ANSIColors # type: ignore[import-not-found] +from _pyrepl.console import InteractiveColoredConsole + from . import futures -class AsyncIOInteractiveConsole(code.InteractiveConsole): +class AsyncIOInteractiveConsole(InteractiveColoredConsole): def __init__(self, locals, loop): - super().__init__(locals) + super().__init__(locals, filename="") self.compile.compiler.flags |= ast.PyCF_ALLOW_TOP_LEVEL_AWAIT self.loop = loop def runcode(self, code): + global return_code future = concurrent.futures.Future() def callback(): + global return_code global repl_future - global repl_future_interrupted + global keyboard_interrupted repl_future = None - repl_future_interrupted = False + keyboard_interrupted = False func = types.FunctionType(code, self.locals) try: coro = func() - except SystemExit: - raise + except SystemExit as se: + return_code = se.code + self.loop.stop() + return except KeyboardInterrupt as ex: - repl_future_interrupted = True + keyboard_interrupted = True future.set_exception(ex) return except BaseException as ex: @@ -57,10 +64,12 @@ def callback(): try: return future.result() - except SystemExit: - raise + except SystemExit as se: + return_code = se.code + self.loop.stop() + return except BaseException: - if repl_future_interrupted: + if keyboard_interrupted: self.write("\nKeyboardInterrupt\n") else: self.showtraceback() @@ -69,18 +78,56 @@ def callback(): class REPLThread(threading.Thread): def run(self): + global return_code + try: banner = ( f'asyncio REPL {sys.version} on {sys.platform}\n' f'Use "await" directly instead of "asyncio.run()".\n' f'Type "help", "copyright", "credits" or "license" ' f'for more information.\n' - f'{getattr(sys, "ps1", ">>> ")}import asyncio' ) - console.interact( - banner=banner, - exitmsg='exiting asyncio REPL...') + console.write(banner) + + if startup_path := os.getenv("PYTHONSTARTUP"): + import tokenize + with tokenize.open(startup_path) as f: + startup_code = compile(f.read(), startup_path, "exec") + exec(startup_code, console.locals) + + ps1 = getattr(sys, "ps1", ">>> ") + if can_colorize(): + ps1 = f"{ANSIColors.BOLD_MAGENTA}{ps1}{ANSIColors.RESET}" + console.write(f"{ps1}import asyncio\n") + + try: + import errno + if os.getenv("PYTHON_BASIC_REPL"): + raise RuntimeError("user environment requested basic REPL") + if not os.isatty(sys.stdin.fileno()): + raise OSError(errno.ENOTTY, "tty required", "stdin") + + # This import will fail on operating systems with no termios. 
+ from _pyrepl.simple_interact import ( + check, + run_multiline_interactive_console, + ) + if err := check(): + raise RuntimeError(err) + except Exception as e: + console.interact(banner="", exitmsg=exit_message) + else: + try: + run_multiline_interactive_console(console=console) + except SystemExit: + # expected via the `exit` and `quit` commands + pass + except BaseException: + # unexpected issue + console.showtraceback() + console.write("Internal error, ") + return_code = 1 finally: warnings.filterwarnings( 'ignore', @@ -91,6 +138,9 @@ def run(self): if __name__ == '__main__': + CAN_USE_PYREPL = True + + return_code = 0 loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) @@ -103,12 +153,12 @@ def run(self): console = AsyncIOInteractiveConsole(repl_locals, loop) repl_future = None - repl_future_interrupted = False + keyboard_interrupted = False try: import readline # NoQA except ImportError: - pass + readline = None interactive_hook = getattr(sys, "__interactivehook__", None) @@ -122,10 +172,11 @@ def run(self): except: pass else: - completer = rlcompleter.Completer(console.locals) - readline.set_completer(completer.complete) + if readline is not None: + completer = rlcompleter.Completer(console.locals) + readline.set_completer(completer.complete) - repl_thread = REPLThread() + repl_thread = REPLThread(name="Interactive thread") repl_thread.daemon = True repl_thread.start() @@ -133,9 +184,12 @@ def run(self): try: loop.run_forever() except KeyboardInterrupt: + keyboard_interrupted = True if repl_future and not repl_future.done(): repl_future.cancel() - repl_future_interrupted = True continue else: break + + console.write('exiting asyncio REPL...\n') + sys.exit(return_code) diff --git a/Lib/asyncio/proactor_events.py b/Lib/asyncio/proactor_events.py index 397a8cda757895..7eb55bd63ddb73 100644 --- a/Lib/asyncio/proactor_events.py +++ b/Lib/asyncio/proactor_events.py @@ -721,6 +721,8 @@ async def sock_sendto(self, sock, data, address): return await self._proactor.sendto(sock, data, 0, address) async def sock_connect(self, sock, address): + if self._debug and sock.gettimeout() != 0: + raise ValueError("the socket must be non-blocking") return await self._proactor.connect(sock, address) async def sock_accept(self, sock): diff --git a/Lib/asyncio/staggered.py b/Lib/asyncio/staggered.py index e180cde0243b15..c3a7441a7b091d 100644 --- a/Lib/asyncio/staggered.py +++ b/Lib/asyncio/staggered.py @@ -69,8 +69,7 @@ async def staggered_race(coro_fns, delay, *, loop=None): exceptions = [] running_tasks = [] - async def run_one_coro( - previous_failed: typing.Optional[locks.Event]) -> None: + async def run_one_coro(previous_failed) -> None: # Wait for the previous task to finish, or for delay seconds if previous_failed is not None: with contextlib.suppress(exceptions_mod.TimeoutError): diff --git a/Lib/base64.py b/Lib/base64.py index 25164d1a1df4fc..5a7e790a193380 100755 --- a/Lib/base64.py +++ b/Lib/base64.py @@ -332,7 +332,7 @@ def a85encode(b, *, foldspaces=False, wrapcol=0, pad=False, adobe=False): wrapcol controls whether the output should have newline (b'\\n') characters added to it. If this is non-zero, each output line will be at most this - many characters long. + many characters long, excluding the trailing newline. pad controls whether the input is padded to a multiple of 4 before encoding. Note that the btoa implementation always pads. 
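The clarified wrapcol wording can be checked directly against the standard-library call; a minimal sketch:

    import base64

    encoded = base64.a85encode(b"The quick brown fox jumps over the lazy dog", wrapcol=20)
    # Every output line stays within wrapcol characters; the b"\n" separators
    # between lines are not counted against the limit.
    assert all(len(line) <= 20 for line in encoded.split(b"\n"))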
diff --git a/Lib/bdb.py b/Lib/bdb.py index 7d63fce6ca63f1..aa621053cfb4bc 100644 --- a/Lib/bdb.py +++ b/Lib/bdb.py @@ -165,6 +165,11 @@ def dispatch_return(self, frame, arg): # The user issued a 'next' or 'until' command. if self.stopframe is frame and self.stoplineno != -1: self._set_stopinfo(None, None) + # The previous frame might not have f_trace set, unless we are + # issuing a command that does not expect to stop, we should set + # f_trace + if self.stoplineno != -1: + self._set_caller_tracefunc(frame) return self.trace_dispatch def dispatch_exception(self, frame, arg): @@ -320,15 +325,14 @@ def _set_stopinfo(self, stopframe, returnframe, stoplineno=0, opcode=False): self.stoplineno = stoplineno self._set_trace_opcodes(opcode) - def _set_caller_tracefunc(self): + def _set_caller_tracefunc(self, current_frame): # Issue #13183: pdb skips frames after hitting a breakpoint and running # step commands. # Restore the trace function in the caller (that may not have been set # for performance reasons) when returning from the current frame. - if self.frame_returning: - caller_frame = self.frame_returning.f_back - if caller_frame and not caller_frame.f_trace: - caller_frame.f_trace = self.trace_dispatch + caller_frame = current_frame.f_back + if caller_frame and not caller_frame.f_trace: + caller_frame.f_trace = self.trace_dispatch # Derived classes and clients can call the following methods # to affect the stepping state. @@ -343,12 +347,10 @@ def set_until(self, frame, lineno=None): def set_step(self): """Stop after one line of code.""" - self._set_caller_tracefunc() self._set_stopinfo(None, None) def set_stepinstr(self): """Stop before the next instruction.""" - self._set_caller_tracefunc() self._set_stopinfo(None, None, opcode=True) def set_next(self, frame): diff --git a/Lib/code.py b/Lib/code.py index 9d124563f728c2..b93902ccf545b3 100644 --- a/Lib/code.py +++ b/Lib/code.py @@ -25,10 +25,10 @@ class InteractiveInterpreter: def __init__(self, locals=None): """Constructor. - The optional 'locals' argument specifies the dictionary in - which code will be executed; it defaults to a newly created - dictionary with key "__name__" set to "__console__" and key - "__doc__" set to None. + The optional 'locals' argument specifies a mapping to use as the + namespace in which code will be executed; it defaults to a newly + created dictionary with key "__name__" set to "__console__" and + key "__doc__" set to None. """ if locals is None: @@ -94,7 +94,7 @@ def runcode(self, code): except: self.showtraceback() - def showsyntaxerror(self, filename=None): + def showsyntaxerror(self, filename=None, **kwargs): """Display the syntax error that just occurred. This doesn't display a stack trace because there isn't one. @@ -106,6 +106,7 @@ def showsyntaxerror(self, filename=None): The output is written by self.write(), below. 
""" + colorize = kwargs.pop('colorize', False) type, value, tb = sys.exc_info() sys.last_exc = value sys.last_type = type @@ -123,7 +124,7 @@ def showsyntaxerror(self, filename=None): value = SyntaxError(msg, (filename, lineno, offset, line)) sys.last_exc = sys.last_value = value if sys.excepthook is sys.__excepthook__: - lines = traceback.format_exception_only(type, value) + lines = traceback.format_exception_only(type, value, colorize=colorize) self.write(''.join(lines)) else: # If someone has set sys.excepthook, we let that take precedence diff --git a/Lib/dataclasses.py b/Lib/dataclasses.py index 3acd03cd865234..aeafbfbbe6e9c4 100644 --- a/Lib/dataclasses.py +++ b/Lib/dataclasses.py @@ -1199,10 +1199,17 @@ def _dataclass_setstate(self, state): def _get_slots(cls): match cls.__dict__.get('__slots__'): - # A class which does not define __slots__ at all is equivalent - # to a class defining __slots__ = ('__dict__', '__weakref__') + # `__dictoffset__` and `__weakrefoffset__` can tell us whether + # the base type has dict/weakref slots, in a way that works correctly + # for both Python classes and C extension types. Extension types + # don't use `__slots__` for slot creation case None: - yield from ('__dict__', '__weakref__') + slots = [] + if getattr(cls, '__weakrefoffset__', -1) != 0: + slots.append('__weakref__') + if getattr(cls, '__dictrefoffset__', -1) != 0: + slots.append('__dict__') + yield from slots case str(slot): yield slot # Slots may be any iterable, but we cannot handle an iterator diff --git a/Lib/email/_header_value_parser.py b/Lib/email/_header_value_parser.py index d1b4c7df4f445f..ab3c3031ef590c 100644 --- a/Lib/email/_header_value_parser.py +++ b/Lib/email/_header_value_parser.py @@ -956,6 +956,7 @@ class _InvalidEwError(errors.HeaderParseError): DOT = ValueTerminal('.', 'dot') ListSeparator = ValueTerminal(',', 'list-separator') ListSeparator.as_ew_allowed = False +ListSeparator.syntactic_break = False RouteComponentMarker = ValueTerminal('@', 'route-component-marker') # @@ -2784,11 +2785,15 @@ def _refold_parse_tree(parse_tree, *, policy): # max_line_length 0/None means no limit, ie: infinitely long. maxlen = policy.max_line_length or sys.maxsize encoding = 'utf-8' if policy.utf8 else 'us-ascii' - lines = [''] - last_ew = None + lines = [''] # Folded lines to be output + leading_whitespace = '' # When we have whitespace between two encoded + # words, we may need to encode the whitespace + # at the beginning of the second word. + last_ew = None # Points to the last encoded character if there's an ew on + # the line last_charset = None wrap_as_ew_blocked = 0 - want_encoding = False + want_encoding = False # This is set to True if we need to encode this part end_ew_not_allowed = Terminal('', 'wrap_as_ew_blocked') parts = list(parse_tree) while parts: @@ -2812,10 +2817,12 @@ def _refold_parse_tree(parse_tree, *, policy): # 'charset' property on the policy. charset = 'utf-8' want_encoding = True + if part.token_type == 'mime-parameters': # Mime parameter folding (using RFC2231) is extra special. _fold_mime_parameters(part, lines, maxlen, encoding) continue + if want_encoding and not wrap_as_ew_blocked: if not part.as_ew_allowed: want_encoding = False @@ -2838,7 +2845,9 @@ def _refold_parse_tree(parse_tree, *, policy): if not hasattr(part, 'encode'): # It's not a Terminal, do each piece individually. 
parts = list(part) + parts - else: + want_encoding = False + continue + elif part.as_ew_allowed: # It's a terminal, wrap it as an encoded word, possibly # combining it with previously encoded words if allowed. if (last_ew is not None and @@ -2847,21 +2856,44 @@ def _refold_parse_tree(parse_tree, *, policy): last_charset == 'utf-8' and charset != 'us-ascii')): last_ew = None last_ew = _fold_as_ew(tstr, lines, maxlen, last_ew, - part.ew_combine_allowed, charset) + part.ew_combine_allowed, charset, leading_whitespace) + # This whitespace has been added to the lines in _fold_as_ew() + # so clear it now. + leading_whitespace = '' last_charset = charset - want_encoding = False - continue + want_encoding = False + continue + else: + # It's a terminal which should be kept non-encoded + # (e.g. a ListSeparator). + last_ew = None + want_encoding = False + # fall through + if len(tstr) <= maxlen - len(lines[-1]): lines[-1] += tstr continue + # This part is too long to fit. The RFC wants us to break at # "major syntactic breaks", so unless we don't consider this # to be one, check if it will fit on the next line by itself. + leading_whitespace = '' if (part.syntactic_break and len(tstr) + 1 <= maxlen): newline = _steal_trailing_WSP_if_exists(lines) if newline or part.startswith_fws(): + # We're going to fold the data onto a new line here. Due to + # the way encoded strings handle continuation lines, we need to + # be prepared to encode any whitespace if the next line turns + # out to start with an encoded word. lines.append(newline + tstr) + + whitespace_accumulator = [] + for char in lines[-1]: + if char not in WSP: + break + whitespace_accumulator.append(char) + leading_whitespace = ''.join(whitespace_accumulator) last_ew = None continue if not hasattr(part, 'encode'): @@ -2885,9 +2917,10 @@ def _refold_parse_tree(parse_tree, *, policy): else: # We can't fold it onto the next line either... lines[-1] += tstr + return policy.linesep.join(lines) + policy.linesep -def _fold_as_ew(to_encode, lines, maxlen, last_ew, ew_combine_allowed, charset): +def _fold_as_ew(to_encode, lines, maxlen, last_ew, ew_combine_allowed, charset, leading_whitespace): """Fold string to_encode into lines as encoded word, combining if allowed. Return the new value for last_ew, or None if ew_combine_allowed is False. @@ -2902,7 +2935,7 @@ def _fold_as_ew(to_encode, lines, maxlen, last_ew, ew_combine_allowed, charset): to_encode = str( get_unstructured(lines[-1][last_ew:] + to_encode)) lines[-1] = lines[-1][:last_ew] - if to_encode[0] in WSP: + elif to_encode[0] in WSP: # We're joining this to non-encoded text, so don't encode # the leading blank. leading_wsp = to_encode[0] @@ -2910,6 +2943,7 @@ def _fold_as_ew(to_encode, lines, maxlen, last_ew, ew_combine_allowed, charset): if (len(lines[-1]) == maxlen): lines.append(_steal_trailing_WSP_if_exists(lines)) lines[-1] += leading_wsp + trailing_wsp = '' if to_encode[-1] in WSP: # Likewise for the trailing space. 
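The folding changes in this file are easiest to exercise through the high-level email API; a minimal sketch assuming the default policy, which routes long headers through this refolding code:

    from email.message import EmailMessage

    msg = EmailMessage()
    msg["To"] = "someone@example.com"
    msg["Subject"] = "Votre abonnement arrive à expiration très bientôt " * 3
    # Serialising folds the long header: non-ASCII runs become RFC 2047 encoded
    # words, and whitespace landing at the start of a continuation line is
    # folded into the encoded word that follows it, so it survives a round trip.
    print(msg.as_bytes().decode("ascii"))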
@@ -2929,11 +2963,20 @@ def _fold_as_ew(to_encode, lines, maxlen, last_ew, ew_combine_allowed, charset): while to_encode: remaining_space = maxlen - len(lines[-1]) - text_space = remaining_space - chrome_len + text_space = remaining_space - chrome_len - len(leading_whitespace) if text_space <= 0: lines.append(' ') continue + # If we are at the start of a continuation line, prepend whitespace + # (we only want to do this when the line starts with an encoded word + # but if we're folding in this helper function, then we know that we + # are going to be writing out an encoded word.) + if len(lines) > 1 and len(lines[-1]) == 1 and leading_whitespace: + encoded_word = _ew.encode(leading_whitespace, charset=encode_as) + lines[-1] += encoded_word + leading_whitespace = '' + to_encode_word = to_encode[:text_space] encoded_word = _ew.encode(to_encode_word, charset=encode_as) excess = len(encoded_word) - remaining_space diff --git a/Lib/glob.py b/Lib/glob.py index 6088de00a67a99..c506e0e2157c51 100644 --- a/Lib/glob.py +++ b/Lib/glob.py @@ -523,43 +523,6 @@ def select_exists(self, path, exists=False): except OSError: pass - @classmethod - def walk(cls, root, top_down, on_error, follow_symlinks): - """Walk the directory tree from the given root, similar to os.walk(). - """ - paths = [root] - while paths: - path = paths.pop() - if isinstance(path, tuple): - yield path - continue - try: - with cls.scandir(path) as scandir_it: - dirnames = [] - filenames = [] - if not top_down: - paths.append((path, dirnames, filenames)) - for entry in scandir_it: - name = entry.name - try: - if entry.is_dir(follow_symlinks=follow_symlinks): - if not top_down: - paths.append(cls.parse_entry(entry)) - dirnames.append(name) - else: - filenames.append(name) - except OSError: - filenames.append(name) - except OSError as error: - if on_error is not None: - on_error(error) - else: - if top_down: - yield path, dirnames, filenames - if dirnames: - prefix = cls.add_slash(path) - paths += [cls.concat_path(prefix, d) for d in reversed(dirnames)] - class _StringGlobber(_Globber): lstat = staticmethod(os.lstat) diff --git a/Lib/idlelib/config.py b/Lib/idlelib/config.py index 92992fd9cce9cd..6a5acac9be8888 100644 --- a/Lib/idlelib/config.py +++ b/Lib/idlelib/config.py @@ -158,8 +158,9 @@ def __init__(self, _utest=False): self.defaultCfg = {} self.userCfg = {} self.cfg = {} # TODO use to select userCfg vs defaultCfg + + # See https://bugs.python.org/issue4630#msg356516 for following. # self.blink_off_time = ['insertofftime'] - # See https:/bugs.python.org/issue4630, msg356516. if not _utest: self.CreateConfigHandlers() diff --git a/Lib/idlelib/pyshell.py b/Lib/idlelib/pyshell.py index 1524fccd5d20f8..d8b2652d5d7979 100755 --- a/Lib/idlelib/pyshell.py +++ b/Lib/idlelib/pyshell.py @@ -11,15 +11,9 @@ "Your Python may not be configured for Tk. **", file=sys.__stderr__) raise SystemExit(1) -# Valid arguments for the ...Awareness call below are defined in the following. -# https://msdn.microsoft.com/en-us/library/windows/desktop/dn280512(v=vs.85).aspx if sys.platform == 'win32': - try: - import ctypes - PROCESS_SYSTEM_DPI_AWARE = 1 # Int required. 
- ctypes.OleDLL('shcore').SetProcessDpiAwareness(PROCESS_SYSTEM_DPI_AWARE) - except (ImportError, AttributeError, OSError): - pass + from idlelib.util import fix_win_hidpi + fix_win_hidpi() from tkinter import messagebox diff --git a/Lib/idlelib/util.py b/Lib/idlelib/util.py index a7ae74b0579004..e05604ab4853f6 100644 --- a/Lib/idlelib/util.py +++ b/Lib/idlelib/util.py @@ -12,11 +12,26 @@ * std streams (pyshell, run), * warning stuff (pyshell, run). """ +import sys # .pyw is for Windows; .pyi is for typing stub files. # The extension order is needed for iomenu open/save dialogs. py_extensions = ('.py', '.pyw', '.pyi') + +# Fix for HiDPI screens on Windows. CALL BEFORE ANY TK OPERATIONS! +# URL for arguments for the ...Awareness call below. +# https://msdn.microsoft.com/en-us/library/windows/desktop/dn280512(v=vs.85).aspx +if sys.platform == 'win32': # pragma: no cover + def fix_win_hidpi(): # Called in pyshell and turtledemo. + try: + import ctypes + PROCESS_SYSTEM_DPI_AWARE = 1 # Int required. + ctypes.OleDLL('shcore').SetProcessDpiAwareness(PROCESS_SYSTEM_DPI_AWARE) + except (ImportError, AttributeError, OSError): + pass + + if __name__ == '__main__': from unittest import main main('idlelib.idle_test.test_util', verbosity=2) diff --git a/Lib/importlib/_bootstrap_external.py b/Lib/importlib/_bootstrap_external.py index db446776901fc3..7742855e8b24db 100644 --- a/Lib/importlib/_bootstrap_external.py +++ b/Lib/importlib/_bootstrap_external.py @@ -472,6 +472,7 @@ def _write_atomic(path, data, mode=0o666): # Python 3.13a1 3568 (Change semantics of END_FOR) # Python 3.13a5 3569 (Specialize CONTAINS_OP) # Python 3.13a6 3570 (Add __firstlineno__ class attribute) +# Python 3.13b1 3571 (Fix miscompilation of private names in generic classes) # Python 3.14 will start with 3600 @@ -488,7 +489,7 @@ def _write_atomic(path, data, mode=0o666): # Whenever MAGIC_NUMBER is changed, the ranges in the magic_values array # in PC/launcher.c must also be updated. -MAGIC_NUMBER = (3570).to_bytes(2, 'little') + b'\r\n' +MAGIC_NUMBER = (3571).to_bytes(2, 'little') + b'\r\n' _RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little') # For import.c diff --git a/Lib/importlib/resources/_common.py b/Lib/importlib/resources/_common.py index e18082fb3d26a0..ca5b06743b46a6 100644 --- a/Lib/importlib/resources/_common.py +++ b/Lib/importlib/resources/_common.py @@ -25,6 +25,8 @@ def package_to_anchor(func): >>> files('a', 'b') Traceback (most recent call last): TypeError: files() takes from 0 to 1 positional arguments but 2 were given + + Remove this compatibility in Python 3.14. """ undefined = object() diff --git a/Lib/importlib/resources/readers.py b/Lib/importlib/resources/readers.py index c3cdf769cbecb0..b86cdeff57c4c2 100644 --- a/Lib/importlib/resources/readers.py +++ b/Lib/importlib/resources/readers.py @@ -1,7 +1,10 @@ import collections +import contextlib import itertools import pathlib import operator +import re +import warnings import zipfile from . 
import abc @@ -62,7 +65,7 @@ class MultiplexedPath(abc.Traversable): """ def __init__(self, *paths): - self._paths = list(map(pathlib.Path, remove_duplicates(paths))) + self._paths = list(map(_ensure_traversable, remove_duplicates(paths))) if not self._paths: message = 'MultiplexedPath must contain at least one path' raise FileNotFoundError(message) @@ -130,7 +133,36 @@ class NamespaceReader(abc.TraversableResources): def __init__(self, namespace_path): if 'NamespacePath' not in str(namespace_path): raise ValueError('Invalid path') - self.path = MultiplexedPath(*list(namespace_path)) + self.path = MultiplexedPath(*map(self._resolve, namespace_path)) + + @classmethod + def _resolve(cls, path_str) -> abc.Traversable: + r""" + Given an item from a namespace path, resolve it to a Traversable. + + path_str might be a directory on the filesystem or a path to a + zipfile plus the path within the zipfile, e.g. ``/foo/bar`` or + ``/foo/baz.zip/inner_dir`` or ``foo\baz.zip\inner_dir\sub``. + """ + (dir,) = (cand for cand in cls._candidate_paths(path_str) if cand.is_dir()) + return dir + + @classmethod + def _candidate_paths(cls, path_str): + yield pathlib.Path(path_str) + yield from cls._resolve_zip_path(path_str) + + @staticmethod + def _resolve_zip_path(path_str): + for match in reversed(list(re.finditer(r'[\\/]', path_str))): + with contextlib.suppress( + FileNotFoundError, + IsADirectoryError, + NotADirectoryError, + PermissionError, + ): + inner = path_str[match.end() :].replace('\\', '/') + '/' + yield zipfile.Path(path_str[: match.start()], inner.lstrip('/')) def resource_path(self, resource): """ @@ -142,3 +174,21 @@ def resource_path(self, resource): def files(self): return self.path + + +def _ensure_traversable(path): + """ + Convert deprecated string arguments to traversables (pathlib.Path). + + Remove with Python 3.15. + """ + if not isinstance(path, str): + return path + + warnings.warn( + "String arguments are deprecated. Pass a Traversable instead.", + DeprecationWarning, + stacklevel=3, + ) + + return pathlib.Path(path) diff --git a/Lib/inspect.py b/Lib/inspect.py index 84260b251a4fb8..e6e49a4ffa673a 100644 --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -3106,6 +3106,8 @@ def _bind(self, args, kwargs, *, partial=False): parameters_ex = () arg_vals = iter(args) + pos_only_param_in_kwargs = [] + while True: # Let's iterate through the positional arguments and corresponding # parameters @@ -3126,10 +3128,10 @@ def _bind(self, args, kwargs, *, partial=False): break elif param.name in kwargs: if param.kind == _POSITIONAL_ONLY: - msg = '{arg!r} parameter is positional only, ' \ - 'but was passed as a keyword' - msg = msg.format(arg=param.name) - raise TypeError(msg) from None + # Raise a TypeError once we are sure there is no + # **kwargs param later. + pos_only_param_in_kwargs.append(param) + continue parameters_ex = (param,) break elif (param.kind == _VAR_KEYWORD or @@ -3211,20 +3213,22 @@ def _bind(self, args, kwargs, *, partial=False): format(arg=param_name)) from None else: - if param.kind == _POSITIONAL_ONLY: - # This should never happen in case of a properly built - # Signature object (but let's have this check here - # to ensure correct behaviour just in case) - raise TypeError('{arg!r} parameter is positional only, ' - 'but was passed as a keyword'. 
\ - format(arg=param.name)) - arguments[param_name] = arg_val if kwargs: if kwargs_param is not None: # Process our '**kwargs'-like parameter arguments[kwargs_param.name] = kwargs + elif pos_only_param_in_kwargs: + raise TypeError( + 'got some positional-only arguments passed as ' + 'keyword arguments: {arg!r}'.format( + arg=', '.join( + param.name + for param in pos_only_param_in_kwargs + ), + ), + ) else: raise TypeError( 'got an unexpected keyword argument {arg!r}'.format( diff --git a/Lib/logging/config.py b/Lib/logging/config.py index 860e4751207470..9de84e527b18ac 100644 --- a/Lib/logging/config.py +++ b/Lib/logging/config.py @@ -725,16 +725,16 @@ def add_filters(self, filterer, filters): def _configure_queue_handler(self, klass, **kwargs): if 'queue' in kwargs: - q = kwargs['queue'] + q = kwargs.pop('queue') else: q = queue.Queue() # unbounded - rhl = kwargs.get('respect_handler_level', False) - if 'listener' in kwargs: - lklass = kwargs['listener'] - else: - lklass = logging.handlers.QueueListener - listener = lklass(q, *kwargs.get('handlers', []), respect_handler_level=rhl) - handler = klass(q) + + rhl = kwargs.pop('respect_handler_level', False) + lklass = kwargs.pop('listener', logging.handlers.QueueListener) + handlers = kwargs.pop('handlers', []) + + listener = lklass(q, *handlers, respect_handler_level=rhl) + handler = klass(q, **kwargs) handler.listener = listener return handler @@ -781,8 +781,12 @@ def configure_handler(self, config): # raise ValueError('No handlers specified for a QueueHandler') if 'queue' in config: from multiprocessing.queues import Queue as MPQueue + from multiprocessing import Manager as MM + proxy_queue = MM().Queue() + proxy_joinable_queue = MM().JoinableQueue() qspec = config['queue'] - if not isinstance(qspec, (queue.Queue, MPQueue)): + if not isinstance(qspec, (queue.Queue, MPQueue, + type(proxy_queue), type(proxy_joinable_queue))): if isinstance(qspec, str): q = self.resolve(qspec) if not callable(q): diff --git a/Lib/ntpath.py b/Lib/ntpath.py index b833e0bad2645f..83e2d3b865757c 100644 --- a/Lib/ntpath.py +++ b/Lib/ntpath.py @@ -168,19 +168,12 @@ def splitdrive(p): try: - from nt import _path_splitroot_ex + from nt import _path_splitroot_ex as splitroot except ImportError: def splitroot(p): - """Split a pathname into drive, root and tail. The drive is defined - exactly as in splitdrive(). On Windows, the root may be a single path - separator or an empty string. The tail contains anything after the root. - For example: - - splitroot('//server/share/') == ('//server/share', '/', '') - splitroot('C:/Users/Barney') == ('C:', '/', 'Users/Barney') - splitroot('C:///spam///ham') == ('C:', '/', '//spam///ham') - splitroot('Windows/notepad') == ('', '', 'Windows/notepad') - """ + """Split a pathname into drive, root and tail. + + The tail contains anything after the root.""" p = os.fspath(p) if isinstance(p, bytes): sep = b'\\' @@ -220,23 +213,6 @@ def splitroot(p): else: # Relative path, e.g. Windows return empty, empty, p -else: - def splitroot(p): - """Split a pathname into drive, root and tail. The drive is defined - exactly as in splitdrive(). On Windows, the root may be a single path - separator or an empty string. The tail contains anything after the root. 
- For example: - - splitroot('//server/share/') == ('//server/share', '/', '') - splitroot('C:/Users/Barney') == ('C:', '/', 'Users/Barney') - splitroot('C:///spam///ham') == ('C:', '/', '//spam///ham') - splitroot('Windows/notepad') == ('', '', 'Windows/notepad') - """ - p = os.fspath(p) - if isinstance(p, bytes): - drive, root, tail = _path_splitroot_ex(os.fsdecode(p)) - return os.fsencode(drive), os.fsencode(root), os.fsencode(tail) - return _path_splitroot_ex(p) # Split a path in head (everything up to the last '/') and tail (the @@ -288,21 +264,6 @@ def dirname(p): return split(p)[0] -# Is a path a junction? - -if hasattr(os.stat_result, 'st_reparse_tag'): - def isjunction(path): - """Test whether a path is a junction""" - try: - st = os.lstat(path) - except (OSError, ValueError, AttributeError): - return False - return st.st_reparse_tag == stat.IO_REPARSE_TAG_MOUNT_POINT -else: - # Use genericpath.isjunction as imported above - pass - - # Is a path a mount point? # Any drive letter root (eg c:\) # Any share UNC (eg \\server\share) @@ -553,7 +514,7 @@ def expandvars(path): # Previously, this function also truncated pathnames to 8+3 format, # but as this module is called "ntpath", that's obviously wrong! try: - from nt import _path_normpath + from nt import _path_normpath as normpath except ImportError: def normpath(path): @@ -592,14 +553,6 @@ def normpath(path): comps.append(curdir) return prefix + sep.join(comps) -else: - def normpath(path): - """Normalize path, eliminating double slashes, etc.""" - path = os.fspath(path) - if isinstance(path, bytes): - return os.fsencode(_path_normpath(os.fsdecode(path))) or b"." - return _path_normpath(path) or "." - def _abspath_fallback(path): """Return the absolute version of a path as a fallback function in case @@ -911,13 +864,15 @@ def commonpath(paths): try: - # The isdir(), isfile(), islink() and exists() implementations in - # genericpath use os.stat(). This is overkill on Windows. Use simpler + # The isdir(), isfile(), islink(), exists() and lexists() implementations + # in genericpath use os.stat(). This is overkill on Windows. Use simpler # builtin functions if they are available. from nt import _path_isdir as isdir from nt import _path_isfile as isfile from nt import _path_islink as islink + from nt import _path_isjunction as isjunction from nt import _path_exists as exists + from nt import _path_lexists as lexists except ImportError: # Use genericpath.* as imported above pass diff --git a/Lib/os.py b/Lib/os.py index 7661ce68ca3be2..0408e2db79e66e 100644 --- a/Lib/os.py +++ b/Lib/os.py @@ -281,6 +281,10 @@ def renames(old, new): __all__.extend(["makedirs", "removedirs", "renames"]) +# Private sentinel that makes walk() classify all symlinks and junctions as +# regular files. +_walk_symlinks_as_files = object() + def walk(top, topdown=True, onerror=None, followlinks=False): """Directory tree generator. @@ -382,7 +386,10 @@ def walk(top, topdown=True, onerror=None, followlinks=False): break try: - is_dir = entry.is_dir() + if followlinks is _walk_symlinks_as_files: + is_dir = entry.is_dir(follow_symlinks=False) and not entry.is_junction() + else: + is_dir = entry.is_dir() except OSError: # If is_dir() raises an OSError, consider the entry not to # be a directory, same behaviour as os.path.isdir(). 
@@ -471,24 +478,59 @@ def fwalk(top=".", topdown=True, onerror=None, *, follow_symlinks=False, dir_fd= """ sys.audit("os.fwalk", top, topdown, onerror, follow_symlinks, dir_fd) top = fspath(top) - # Note: To guard against symlink races, we use the standard - # lstat()/open()/fstat() trick. - if not follow_symlinks: - orig_st = stat(top, follow_symlinks=False, dir_fd=dir_fd) - topfd = open(top, O_RDONLY | O_NONBLOCK, dir_fd=dir_fd) + stack = [(_fwalk_walk, (True, dir_fd, top, top, None))] + isbytes = isinstance(top, bytes) try: - if (follow_symlinks or (st.S_ISDIR(orig_st.st_mode) and - path.samestat(orig_st, stat(topfd)))): - yield from _fwalk(topfd, top, isinstance(top, bytes), - topdown, onerror, follow_symlinks) + while stack: + yield from _fwalk(stack, isbytes, topdown, onerror, follow_symlinks) finally: - close(topfd) - - def _fwalk(topfd, toppath, isbytes, topdown, onerror, follow_symlinks): + # Close any file descriptors still on the stack. + while stack: + action, value = stack.pop() + if action == _fwalk_close: + close(value) + + # Each item in the _fwalk() stack is a pair (action, args). + _fwalk_walk = 0 # args: (isroot, dirfd, toppath, topname, entry) + _fwalk_yield = 1 # args: (toppath, dirnames, filenames, topfd) + _fwalk_close = 2 # args: dirfd + + def _fwalk(stack, isbytes, topdown, onerror, follow_symlinks): # Note: This uses O(depth of the directory tree) file descriptors: if # necessary, it can be adapted to only require O(1) FDs, see issue # #13734. + action, value = stack.pop() + if action == _fwalk_close: + close(value) + return + elif action == _fwalk_yield: + yield value + return + assert action == _fwalk_walk + isroot, dirfd, toppath, topname, entry = value + try: + if not follow_symlinks: + # Note: To guard against symlink races, we use the standard + # lstat()/open()/fstat() trick. + if entry is None: + orig_st = stat(topname, follow_symlinks=False, dir_fd=dirfd) + else: + orig_st = entry.stat(follow_symlinks=False) + topfd = open(topname, O_RDONLY | O_NONBLOCK, dir_fd=dirfd) + except OSError as err: + if isroot: + raise + if onerror is not None: + onerror(err) + return + stack.append((_fwalk_close, topfd)) + if not follow_symlinks: + if isroot and not st.S_ISDIR(orig_st.st_mode): + return + if not path.samestat(orig_st, stat(topfd)): + return + scandir_it = scandir(topfd) dirs = [] nondirs = [] @@ -514,31 +556,18 @@ def _fwalk(topfd, toppath, isbytes, topdown, onerror, follow_symlinks): if topdown: yield toppath, dirs, nondirs, topfd + else: + stack.append((_fwalk_yield, (toppath, dirs, nondirs, topfd))) - for name in dirs if entries is None else zip(dirs, entries): - try: - if not follow_symlinks: - if topdown: - orig_st = stat(name, dir_fd=topfd, follow_symlinks=False) - else: - assert entries is not None - name, entry = name - orig_st = entry.stat(follow_symlinks=False) - dirfd = open(name, O_RDONLY | O_NONBLOCK, dir_fd=topfd) - except OSError as err: - if onerror is not None: - onerror(err) - continue - try: - if follow_symlinks or path.samestat(orig_st, stat(dirfd)): - dirpath = path.join(toppath, name) - yield from _fwalk(dirfd, dirpath, isbytes, - topdown, onerror, follow_symlinks) - finally: - close(dirfd) - - if not topdown: - yield toppath, dirs, nondirs, topfd + toppath = path.join(toppath, toppath[:0]) # Add trailing slash. 
+ if entries is None: + stack.extend( + (_fwalk_walk, (False, topfd, toppath + name, name, None)) + for name in dirs[::-1]) + else: + stack.extend( + (_fwalk_walk, (False, topfd, toppath + name, name, entry)) + for name, entry in zip(dirs[::-1], entries[::-1])) __all__.append("fwalk") diff --git a/Lib/pathlib/_abc.py b/Lib/pathlib/_abc.py index 06c10e8e4612ca..4d24146aa53f3b 100644 --- a/Lib/pathlib/_abc.py +++ b/Lib/pathlib/_abc.py @@ -225,15 +225,13 @@ def with_suffix(self, suffix): string, remove the suffix from the path. """ stem = self.stem - if not suffix: - return self.with_name(stem) - elif not stem: + if not stem: # If the stem is empty, we can't make the suffix non-empty. raise ValueError(f"{self!r} has an empty name") - elif suffix.startswith('.') and len(suffix) > 1: - return self.with_name(stem + suffix) - else: + elif suffix and not (suffix.startswith('.') and len(suffix) > 1): raise ValueError(f"Invalid suffix {suffix!r}") + else: + return self.with_name(stem + suffix) def relative_to(self, other, *, walk_up=False): """Return the relative path to another path identified by the passed @@ -698,7 +696,37 @@ def rglob(self, pattern, *, case_sensitive=None, recurse_symlinks=True): def walk(self, top_down=True, on_error=None, follow_symlinks=False): """Walk the directory tree from this directory, similar to os.walk().""" - return self._globber.walk(self, top_down, on_error, follow_symlinks) + paths = [self] + while paths: + path = paths.pop() + if isinstance(path, tuple): + yield path + continue + dirnames = [] + filenames = [] + if not top_down: + paths.append((path, dirnames, filenames)) + try: + for child in path.iterdir(): + try: + if child.is_dir(follow_symlinks=follow_symlinks): + if not top_down: + paths.append(child) + dirnames.append(child.name) + else: + filenames.append(child.name) + except OSError: + filenames.append(child.name) + except OSError as error: + if on_error is not None: + on_error(error) + if not top_down: + while not isinstance(paths.pop(), tuple): + pass + continue + if top_down: + yield path, dirnames, filenames + paths += [path.joinpath(d) for d in reversed(dirnames)] def absolute(self): """Return an absolute version of this path diff --git a/Lib/pathlib/_local.py b/Lib/pathlib/_local.py index b1e678aceb9ce8..fbc8db78b86e61 100644 --- a/Lib/pathlib/_local.py +++ b/Lib/pathlib/_local.py @@ -623,7 +623,9 @@ def walk(self, top_down=True, on_error=None, follow_symlinks=False): """Walk the directory tree from this directory, similar to os.walk().""" sys.audit("pathlib.Path.walk", self, on_error, follow_symlinks) root_dir = str(self) - results = self._globber.walk(root_dir, top_down, on_error, follow_symlinks) + if not follow_symlinks: + follow_symlinks = os._walk_symlinks_as_files + results = os.walk(root_dir, top_down, on_error, follow_symlinks) for path_str, dirnames, filenames in results: if root_dir == '.': path_str = path_str[2:] diff --git a/Lib/pdb.py b/Lib/pdb.py index e507a9bb896611..0f791d71950099 100755 --- a/Lib/pdb.py +++ b/Lib/pdb.py @@ -392,9 +392,12 @@ def setup(self, f, tb): self.tb_lineno[tb.tb_frame] = lineno tb = tb.tb_next self.curframe = self.stack[self.curindex][0] - # The f_locals dictionary is updated from the actual frame - # locals whenever the .f_locals accessor is called, so we - # cache it here to ensure that modifications are not overwritten. 
+ # The f_locals dictionary used to be updated from the actual frame + # locals whenever the .f_locals accessor was called, so it was + # cached here to ensure that modifications were not overwritten. While + # the caching is no longer required now that f_locals is a direct proxy + # on optimized frames, it's also harmless, so the code structure has + # been left unchanged. self.curframe_locals = self.curframe.f_locals self.set_convenience_variable(self.curframe, '_frame', self.curframe) diff --git a/Lib/platform.py b/Lib/platform.py index ebaba37563120e..5958382276e79c 100755 --- a/Lib/platform.py +++ b/Lib/platform.py @@ -1153,17 +1153,16 @@ def _sys_version(sys_version=None): if result is not None: return result - sys_version_parser = re.compile( - r'([\w.+]+)\s*' # "version" - r'\(#?([^,]+)' # "(#buildno" - r'(?:,\s*([\w ]*)' # ", builddate" - r'(?:,\s*([\w :]*))?)?\)\s*' # ", buildtime)" - r'\[([^\]]+)\]?', re.ASCII) # "[compiler]" - if sys.platform.startswith('java'): # Jython + jython_sys_version_parser = re.compile( + r'([\w.+]+)\s*' # "version" + r'\(#?([^,]+)' # "(#buildno" + r'(?:,\s*([\w ]*)' # ", builddate" + r'(?:,\s*([\w :]*))?)?\)\s*' # ", buildtime)" + r'\[([^\]]+)\]?', re.ASCII) # "[compiler]" name = 'Jython' - match = sys_version_parser.match(sys_version) + match = jython_sys_version_parser.match(sys_version) if match is None: raise ValueError( 'failed to parse Jython sys.version: %s' % @@ -1190,7 +1189,14 @@ def _sys_version(sys_version=None): else: # CPython - match = sys_version_parser.match(sys_version) + cpython_sys_version_parser = re.compile( + r'([\w.+]+)\s*' # "version" + r'(?:experimental free-threading build\s+)?' # "free-threading-build" + r'\(#?([^,]+)' # "(#buildno" + r'(?:,\s*([\w ]*)' # ", builddate" + r'(?:,\s*([\w :]*))?)?\)\s*' # ", buildtime)" + r'\[([^\]]+)\]?', re.ASCII) # "[compiler]" + match = cpython_sys_version_parser.match(sys_version) if match is None: raise ValueError( 'failed to parse CPython sys.version: %s' % diff --git a/Lib/posixpath.py b/Lib/posixpath.py index b4547d7893b1cd..47b2aa572e5c65 100644 --- a/Lib/posixpath.py +++ b/Lib/posixpath.py @@ -135,18 +135,12 @@ def splitdrive(p): try: - from posix import _path_splitroot_ex + from posix import _path_splitroot_ex as splitroot except ImportError: def splitroot(p): - """Split a pathname into drive, root and tail. On Posix, drive is always - empty; the root may be empty, a single slash, or two slashes. The tail - contains anything after the root. For example: - - splitroot('foo/bar') == ('', '', 'foo/bar') - splitroot('/foo/bar') == ('', '/', 'foo/bar') - splitroot('//foo/bar') == ('', '//', 'foo/bar') - splitroot('///foo/bar') == ('', '/', '//foo/bar') - """ + """Split a pathname into drive, root and tail. + + The tail contains anything after the root.""" p = os.fspath(p) if isinstance(p, bytes): sep = b'/' @@ -164,23 +158,6 @@ def splitroot(p): # Precisely two leading slashes, e.g.: '//foo'. Implementation defined per POSIX, see # https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap04.html#tag_04_13 return empty, p[:2], p[2:] -else: - def splitroot(p): - """Split a pathname into drive, root and tail. On Posix, drive is always - empty; the root may be empty, a single slash, or two slashes. The tail - contains anything after the root. 
For example: - - splitroot('foo/bar') == ('', '', 'foo/bar') - splitroot('/foo/bar') == ('', '/', 'foo/bar') - splitroot('//foo/bar') == ('', '//', 'foo/bar') - splitroot('///foo/bar') == ('', '/', '//foo/bar') - """ - p = os.fspath(p) - if isinstance(p, bytes): - # Optimisation: the drive is always empty - _, root, tail = _path_splitroot_ex(os.fsdecode(p)) - return b'', os.fsencode(root), os.fsencode(tail) - return _path_splitroot_ex(p) # Return the tail (basename) part of a path, same as split(path)[1]. @@ -363,7 +340,7 @@ def expandvars(path): # if it contains symbolic links! try: - from posix import _path_normpath + from posix import _path_normpath as normpath except ImportError: def normpath(path): @@ -394,14 +371,6 @@ def normpath(path): path = initial_slashes + sep.join(comps) return path or dot -else: - def normpath(path): - """Normalize path, eliminating double slashes, etc.""" - path = os.fspath(path) - if isinstance(path, bytes): - return os.fsencode(_path_normpath(os.fsdecode(path))) or b"." - return _path_normpath(path) or "." - def abspath(path): """Return an absolute path.""" @@ -471,26 +440,26 @@ def realpath(filename, *, strict=False): if not stat.S_ISLNK(st.st_mode): path = newpath continue + if newpath in seen: + # Already seen this path + path = seen[newpath] + if path is not None: + # use cached value + continue + # The symlink is not resolved, so we must have a symlink loop. + if strict: + # Raise OSError(errno.ELOOP) + os.stat(newpath) + path = newpath + continue + target = os.readlink(newpath) except OSError: if strict: raise path = newpath continue # Resolve the symbolic link - if newpath in seen: - # Already seen this path - path = seen[newpath] - if path is not None: - # use cached value - continue - # The symlink is not resolved, so we must have a symlink loop. - if strict: - # Raise OSError(errno.ELOOP) - os.stat(newpath) - path = newpath - continue seen[newpath] = None # not resolved symlink - target = os.readlink(newpath) if target.startswith(sep): # Symlink target is absolute; reset resolved path. path = sep diff --git a/Lib/pydoc.py b/Lib/pydoc.py index 55ccf2152c26cb..922e7fb42d8332 100755 --- a/Lib/pydoc.py +++ b/Lib/pydoc.py @@ -2008,7 +2008,7 @@ def interact(self): if (len(request) > 2 and request[0] == request[-1] in ("'", '"') and request[0] not in request[1:-1]): request = request[1:-1] - if request.lower() in ('q', 'quit'): break + if request.lower() in ('q', 'quit', 'exit'): break if request == 'help': self.intro() else: @@ -2060,7 +2060,7 @@ def intro(self): enter "modules spam". To quit this help utility and return to the interpreter, -enter "q" or "quit". +enter "q", "quit" or "exit". '''.format('%d.%d' % sys.version_info[:2])) def list(self, items, columns=4, width=80): diff --git a/Lib/pydoc_data/topics.py b/Lib/pydoc_data/topics.py index 4643df80e44aaf..f5ba27df0bd521 100644 --- a/Lib/pydoc_data/topics.py +++ b/Lib/pydoc_data/topics.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Autogenerated by Sphinx on Wed May 8 11:11:17 2024 +# Autogenerated by Sphinx on Wed Jun 5 16:43:53 2024 # as part of the release process. 
topics = {'assert': 'The "assert" statement\n' '**********************\n' @@ -5034,6 +5034,12 @@ ' 0\n' ' (Pdb)\n' '\n' + 'Changed in version 3.13: The implementation of **PEP 667** means ' + 'that\n' + 'name assignments made via "pdb" will immediately affect the ' + 'active\n' + 'scope, even when running inside an *optimized scope*.\n' + '\n' 'The module defines the following functions; each enters the ' 'debugger\n' 'in a slightly different way:\n' @@ -5245,7 +5251,8 @@ '* "$_exception": the exception if the frame is raising an ' 'exception\n' '\n' - 'Added in version 3.12.\n' + 'Added in version 3.12: Added the *convenience variable* ' + 'feature.\n' '\n' 'If a file ".pdbrc" exists in the user’s home directory or in ' 'the\n' @@ -5620,24 +5627,22 @@ '\n' 'interact\n' '\n' - ' Start an interactive interpreter (using the "code" module) ' - 'whose\n' - ' global namespace contains all the (global and local) names ' - 'found in\n' - ' the current scope. Use "exit()" or "quit()" to exit the ' - 'interpreter\n' - ' and return to the debugger.\n' + ' Start an interactive interpreter (using the "code" module) in ' + 'a new\n' + ' global namespace initialised from the local and global ' + 'namespaces\n' + ' for the current scope. Use "exit()" or "quit()" to exit the\n' + ' interpreter and return to the debugger.\n' '\n' ' Note:\n' '\n' - ' Because interact creates a new global namespace with the ' - 'current\n' - ' global and local namespace for execution, assignment to ' - 'variables\n' - ' will not affect the original namespaces. However, ' - 'modification to\n' - ' the mutable objects will be reflected in the original ' - 'namespaces.\n' + ' As "interact" creates a new dedicated namespace for code\n' + ' execution, assignments to variables will not affect the ' + 'original\n' + ' namespaces. However, modifications to any referenced ' + 'mutable\n' + ' objects will be reflected in the original namespaces as ' + 'usual.\n' '\n' ' Added in version 3.2.\n' '\n' @@ -12362,7 +12367,7 @@ 'the\n' ' "LC_NUMERIC" locale in some cases.\n' '\n' - 'str.format_map(mapping)\n' + 'str.format_map(mapping, /)\n' '\n' ' Similar to "str.format(**mapping)", except that ' '"mapping" is used\n' @@ -14766,10 +14771,10 @@ ' The iterator returns "tuple"s containing the "(start_line,\n' ' end_line, start_column, end_column)". The *i-th* tuple ' 'corresponds\n' - ' to the position of the source code that compiled to the *i-th*\n' - ' instruction. Column information is 0-indexed utf-8 byte offsets ' - 'on\n' - ' the given source line.\n' + ' to the position of the source code that compiled to the *i-th* ' + 'code\n' + ' unit. Column information is 0-indexed utf-8 byte offsets on the\n' + ' given source line.\n' '\n' ' This positional information can be missing. A non-exhaustive ' 'lists\n' @@ -14894,16 +14899,16 @@ '| | ' '""f_code"". |\n' '+----------------------------------------------------+----------------------------------------------------+\n' - '| frame.f_locals | The ' - 'dictionary used by the frame to look up local |\n' + '| frame.f_locals | The mapping ' + 'used by the frame to look up local |\n' '| | variables. ' - 'If the frame refers to a function or |\n' - '| | ' - 'comprehension, this may return a write- through |\n' - '| | proxy ' - 'object. Changed in version 3.13: Return a |\n' - '| | proxy for ' - 'functions and comprehensions. |\n' + 'If the frame refers to an *optimized |\n' + '| | scope*, this ' + 'may return a write-through proxy |\n' + '| | object. 
' + 'Changed in version 3.13: Return a proxy |\n' + '| | for ' + 'optimized scopes. |\n' '+----------------------------------------------------+----------------------------------------------------+\n' '| frame.f_globals | The ' 'dictionary used by the frame to look up global |\n' @@ -15312,7 +15317,7 @@ '\n' ' Return a shallow copy of the dictionary.\n' '\n' - ' classmethod fromkeys(iterable[, value])\n' + ' classmethod fromkeys(iterable, value=None)\n' '\n' ' Create a new dictionary with keys from *iterable* and ' 'values set\n' @@ -15328,7 +15333,7 @@ 'distinct\n' ' values, use a dict comprehension instead.\n' '\n' - ' get(key[, default])\n' + ' get(key, default=None)\n' '\n' ' Return the value for *key* if *key* is in the ' 'dictionary, else\n' @@ -15380,7 +15385,7 @@ '\n' ' Added in version 3.8.\n' '\n' - ' setdefault(key[, default])\n' + ' setdefault(key, default=None)\n' '\n' ' If *key* is in the dictionary, return its value. If ' 'not, insert\n' diff --git a/Lib/rlcompleter.py b/Lib/rlcompleter.py index 206d6fb511cdf6..23eb0020f42e8a 100644 --- a/Lib/rlcompleter.py +++ b/Lib/rlcompleter.py @@ -35,6 +35,7 @@ import keyword import re import __main__ +import warnings __all__ = ["Completer"] @@ -88,10 +89,11 @@ def complete(self, text, state): return None if state == 0: - if "." in text: - self.matches = self.attr_matches(text) - else: - self.matches = self.global_matches(text) + with warnings.catch_warnings(action="ignore"): + if "." in text: + self.matches = self.attr_matches(text) + else: + self.matches = self.global_matches(text) try: return self.matches[state] except IndexError: diff --git a/Lib/shutil.py b/Lib/shutil.py index c9b4da34b1e19b..b0d49e98cfe5f9 100644 --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -606,37 +606,21 @@ def _rmtree_islink(st): # version vulnerable to race conditions def _rmtree_unsafe(path, onexc): - try: - with os.scandir(path) as scandir_it: - entries = list(scandir_it) - except FileNotFoundError: - return - except OSError as err: - onexc(os.scandir, path, err) - entries = [] - for entry in entries: - fullname = entry.path - try: - is_dir = entry.is_dir(follow_symlinks=False) - except FileNotFoundError: - continue - except OSError: - is_dir = False - - if is_dir and not entry.is_junction(): + def onerror(err): + if not isinstance(err, FileNotFoundError): + onexc(os.scandir, err.filename, err) + results = os.walk(path, topdown=False, onerror=onerror, followlinks=os._walk_symlinks_as_files) + for dirpath, dirnames, filenames in results: + for name in dirnames: + fullname = os.path.join(dirpath, name) try: - if entry.is_symlink(): - # This can only happen if someone replaces - # a directory with a symlink after the call to - # os.scandir or entry.is_dir above. - raise OSError("Cannot call rmtree on a symbolic link") + os.rmdir(fullname) except FileNotFoundError: continue except OSError as err: - onexc(os.path.islink, fullname, err) - continue - _rmtree_unsafe(fullname, onexc) - else: + onexc(os.rmdir, fullname, err) + for name in filenames: + fullname = os.path.join(dirpath, name) try: os.unlink(fullname) except FileNotFoundError: @@ -651,81 +635,76 @@ def _rmtree_unsafe(path, onexc): onexc(os.rmdir, path, err) # Version using fd-based APIs to protect against races -def _rmtree_safe_fd(topfd, path, onexc): +def _rmtree_safe_fd(stack, onexc): + # Each stack item has four elements: + # * func: The first operation to perform: os.lstat, os.close or os.rmdir. 
+ # Walking a directory starts with an os.lstat() to detect symlinks; in + # this case, func is updated before subsequent operations and passed to + # onexc() if an error occurs. + # * dirfd: Open file descriptor, or None if we're processing the top-level + # directory given to rmtree() and the user didn't supply dir_fd. + # * path: Path of file to operate upon. This is passed to onexc() if an + # error occurs. + # * orig_entry: os.DirEntry, or None if we're processing the top-level + # directory given to rmtree(). We used the cached stat() of the entry to + # save a call to os.lstat() when walking subdirectories. + func, dirfd, path, orig_entry = stack.pop() + name = path if orig_entry is None else orig_entry.name try: + if func is os.close: + os.close(dirfd) + return + if func is os.rmdir: + os.rmdir(name, dir_fd=dirfd) + return + + # Note: To guard against symlink races, we use the standard + # lstat()/open()/fstat() trick. + assert func is os.lstat + if orig_entry is None: + orig_st = os.lstat(name, dir_fd=dirfd) + else: + orig_st = orig_entry.stat(follow_symlinks=False) + + func = os.open # For error reporting. + topfd = os.open(name, os.O_RDONLY | os.O_NONBLOCK, dir_fd=dirfd) + + func = os.path.islink # For error reporting. + try: + if not os.path.samestat(orig_st, os.fstat(topfd)): + # Symlinks to directories are forbidden, see GH-46010. + raise OSError("Cannot call rmtree on a symbolic link") + stack.append((os.rmdir, dirfd, path, orig_entry)) + finally: + stack.append((os.close, topfd, path, orig_entry)) + + func = os.scandir # For error reporting. with os.scandir(topfd) as scandir_it: entries = list(scandir_it) - except FileNotFoundError: - return - except OSError as err: - err.filename = path - onexc(os.scandir, path, err) - return - for entry in entries: - fullname = os.path.join(path, entry.name) - try: - is_dir = entry.is_dir(follow_symlinks=False) - except FileNotFoundError: - continue - except OSError: - is_dir = False - else: - if is_dir: - try: - orig_st = entry.stat(follow_symlinks=False) - is_dir = stat.S_ISDIR(orig_st.st_mode) - except FileNotFoundError: - continue - except OSError as err: - onexc(os.lstat, fullname, err) - continue - if is_dir: + for entry in entries: + fullname = os.path.join(path, entry.name) try: - dirfd = os.open(entry.name, os.O_RDONLY | os.O_NONBLOCK, dir_fd=topfd) - dirfd_closed = False + if entry.is_dir(follow_symlinks=False): + # Traverse into sub-directory. + stack.append((os.lstat, topfd, fullname, entry)) + continue except FileNotFoundError: continue - except OSError as err: - onexc(os.open, fullname, err) - else: - try: - if os.path.samestat(orig_st, os.fstat(dirfd)): - _rmtree_safe_fd(dirfd, fullname, onexc) - try: - os.close(dirfd) - except OSError as err: - # close() should not be retried after an error. - dirfd_closed = True - onexc(os.close, fullname, err) - dirfd_closed = True - try: - os.rmdir(entry.name, dir_fd=topfd) - except FileNotFoundError: - continue - except OSError as err: - onexc(os.rmdir, fullname, err) - else: - try: - # This can only happen if someone replaces - # a directory with a symlink after the call to - # os.scandir or stat.S_ISDIR above. 
- raise OSError("Cannot call rmtree on a symbolic " - "link") - except OSError as err: - onexc(os.path.islink, fullname, err) - finally: - if not dirfd_closed: - try: - os.close(dirfd) - except OSError as err: - onexc(os.close, fullname, err) - else: + except OSError: + pass try: os.unlink(entry.name, dir_fd=topfd) except FileNotFoundError: continue except OSError as err: onexc(os.unlink, fullname, err) + except FileNotFoundError as err: + if orig_entry is None or func is os.close: + err.filename = path + onexc(func, path, err) + except OSError as err: + err.filename = path + onexc(func, path, err) _use_fd_functions = ({os.open, os.stat, os.unlink, os.rmdir} <= os.supports_dir_fd and @@ -778,41 +757,16 @@ def onexc(*args): # While the unsafe rmtree works fine on bytes, the fd based does not. if isinstance(path, bytes): path = os.fsdecode(path) - # Note: To guard against symlink races, we use the standard - # lstat()/open()/fstat() trick. - try: - orig_st = os.lstat(path, dir_fd=dir_fd) - except OSError as err: - onexc(os.lstat, path, err) - return - try: - fd = os.open(path, os.O_RDONLY | os.O_NONBLOCK, dir_fd=dir_fd) - fd_closed = False - except OSError as err: - onexc(os.open, path, err) - return + stack = [(os.lstat, dir_fd, path, None)] try: - if os.path.samestat(orig_st, os.fstat(fd)): - _rmtree_safe_fd(fd, path, onexc) - try: - os.close(fd) - except OSError as err: - # close() should not be retried after an error. - fd_closed = True - onexc(os.close, path, err) - fd_closed = True - try: - os.rmdir(path, dir_fd=dir_fd) - except OSError as err: - onexc(os.rmdir, path, err) - else: - try: - # symlinks to directories are forbidden, see bug #1669 - raise OSError("Cannot call rmtree on a symbolic link") - except OSError as err: - onexc(os.path.islink, path, err) + while stack: + _rmtree_safe_fd(stack, onexc) finally: - if not fd_closed: + # Close any file descriptors still on the stack. + while stack: + func, fd, path, entry = stack.pop() + if func is not os.close: + continue try: os.close(fd) except OSError as err: diff --git a/Lib/site.py b/Lib/site.py index b63447d6673f68..7eace190f5ab21 100644 --- a/Lib/site.py +++ b/Lib/site.py @@ -185,7 +185,9 @@ def addpackage(sitedir, name, known_paths): return try: - pth_content = pth_content.decode() + # Accept BOM markers in .pth files as we do in source files + # (Windows PowerShell 5.1 makes it hard to emit UTF-8 files without a BOM) + pth_content = pth_content.decode("utf-8-sig") except UnicodeDecodeError: # Fallback to locale encoding for backward compatibility. # We will deprecate this fallback in the future. @@ -524,7 +526,13 @@ def register_readline(): def write_history(): try: - if os.getenv("PYTHON_BASIC_REPL"): + # _pyrepl.__main__ is executed as the __main__ module + from __main__ import CAN_USE_PYREPL + except ImportError: + CAN_USE_PYREPL = False + + try: + if os.getenv("PYTHON_BASIC_REPL") or not CAN_USE_PYREPL: readline.write_history_file(history) else: _pyrepl.readline.write_history_file(history) diff --git a/Lib/socket.py b/Lib/socket.py index 77986fc2e48099..524ce1361b9091 100644 --- a/Lib/socket.py +++ b/Lib/socket.py @@ -306,7 +306,8 @@ def makefile(self, mode="r", buffering=None, *, """makefile(...) -> an I/O stream connected to the socket The arguments are as for io.open() after the filename, except the only - supported mode values are 'r' (default), 'w' and 'b'. + supported mode values are 'r' (default), 'w', 'b', or a combination of + those. """ # XXX refactor to share code? 
if not set(mode) <= {"r", "w", "b"}: diff --git a/Lib/subprocess.py b/Lib/subprocess.py index 212fdf5b095511..b2dcb1454c139e 100644 --- a/Lib/subprocess.py +++ b/Lib/subprocess.py @@ -842,7 +842,7 @@ def __init__(self, args, bufsize=-1, executable=None, raise TypeError("bufsize must be an integer") if stdout is STDOUT: - raise ValueError("STDOUT can only be used for stderr") + raise ValueError("STDOUT can only be used for stderr") if pipesize is None: pipesize = -1 # Restore default diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py index 46afdfca331a23..f126b6745dc83b 100644 --- a/Lib/test/_test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -22,7 +22,6 @@ import subprocess import struct import operator -import pathlib import pickle import weakref import warnings @@ -324,8 +323,9 @@ def test_set_executable(self): self.skipTest(f'test not appropriate for {self.TYPE}') paths = [ sys.executable, # str - sys.executable.encode(), # bytes - pathlib.Path(sys.executable) # os.PathLike + os.fsencode(sys.executable), # bytes + os_helper.FakePath(sys.executable), # os.PathLike + os_helper.FakePath(os.fsencode(sys.executable)), # os.PathLike bytes ] for path in paths: self.set_executable(path) diff --git a/Lib/test/audit-tests.py b/Lib/test/audit-tests.py index de7d0da560a1c7..b9021467817f27 100644 --- a/Lib/test/audit-tests.py +++ b/Lib/test/audit-tests.py @@ -556,6 +556,17 @@ def hook(event, args): sys.monitoring.register_callback(1, 1, None) +def test_winapi_createnamedpipe(pipe_name): + import _winapi + + def hook(event, args): + if event == "_winapi.CreateNamedPipe": + print(event, args) + + sys.addaudithook(hook) + _winapi.CreateNamedPipe(pipe_name, _winapi.PIPE_ACCESS_DUPLEX, 8, 2, 0, 0, 0, 0) + + if __name__ == "__main__": from test.support import suppress_msvcrt_asserts diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py index 570110893629cf..db6502b34711af 100644 --- a/Lib/test/datetimetester.py +++ b/Lib/test/datetimetester.py @@ -22,6 +22,7 @@ from test import support from test.support import is_resource_enabled, ALWAYS_EQ, LARGEST, SMALLEST +from test.support import warnings_helper import datetime as datetime_module from datetime import MINYEAR, MAXYEAR @@ -46,6 +47,25 @@ pass # +# This is copied from test_import/__init__.py. +def no_rerun(reason): + """Skip rerunning for a particular test. + + WARNING: Use this decorator with care; skipping rerunning makes it + impossible to find reference leaks. Provide a clear reason for skipping the + test using the 'reason' parameter. + """ + def deco(func): + _has_run = False + def wrapper(self): + nonlocal _has_run + if _has_run: + self.skipTest(reason) + func(self) + _has_run = True + return wrapper + return deco + pickle_loads = {pickle.loads, pickle._loads} pickle_choices = [(pickle, pickle, proto) @@ -2797,6 +2817,7 @@ def test_strptime_single_digit(self): newdate = strptime(string, format) self.assertEqual(newdate, target, msg=reason) + @warnings_helper.ignore_warnings(category=DeprecationWarning) def test_strptime_leap_year(self): # GH-70647: warns if parsing a format with a day and no year. 
with self.assertRaises(ValueError): @@ -6381,6 +6402,7 @@ class IranTest(ZoneInfoTest): @unittest.skipIf(_testcapi is None, 'need _testcapi module') +@no_rerun("the encapsulated datetime C API does not support reloading") class CapiTest(unittest.TestCase): def setUp(self): # Since the C API is not present in the _Pure tests, skip all tests diff --git a/Lib/test/libregrtest/cmdline.py b/Lib/test/libregrtest/cmdline.py index 3e7428c4ad3797..d4dac77b250ad6 100644 --- a/Lib/test/libregrtest/cmdline.py +++ b/Lib/test/libregrtest/cmdline.py @@ -520,15 +520,6 @@ def _parse_args(args, **kwargs): "--huntrleaks without -jN option", file=sys.stderr) - if ns.huntrleaks and ns.xmlpath: - # The XML data is written into a file outside runtest_refleak(), so - # it looks like a leak but it's not. Simply disable XML output when - # hunting for reference leaks (gh-83434). - ns.xmlpath = None - print("WARNING: Disable --junit-xml because it's incompatible " - "with --huntrleaks", - file=sys.stderr) - if ns.forever: # --forever implies --failfast ns.failfast = True diff --git a/Lib/test/libregrtest/refleak.py b/Lib/test/libregrtest/refleak.py index f582c0d3e7ff13..697f305cad1fbd 100644 --- a/Lib/test/libregrtest/refleak.py +++ b/Lib/test/libregrtest/refleak.py @@ -1,3 +1,4 @@ +import os import sys import warnings from inspect import isabstract @@ -23,6 +24,30 @@ def _get_dump(cls): cls._abc_negative_cache, cls._abc_negative_cache_version) +def save_support_xml(filename): + if support.junit_xml_list is None: + return + + import pickle + with open(filename, 'xb') as fp: + pickle.dump(support.junit_xml_list, fp) + support.junit_xml_list = None + + +def restore_support_xml(filename): + try: + fp = open(filename, 'rb') + except FileNotFoundError: + return + + import pickle + with fp: + xml_list = pickle.load(fp) + os.unlink(filename) + + support.junit_xml_list = xml_list + + def runtest_refleak(test_name, test_func, hunt_refleak: HuntRefleak, quiet: bool): @@ -95,7 +120,8 @@ def get_pooled_int(value): numbers = numbers[:warmups] + ':' + numbers[warmups:] print(numbers, file=sys.stderr, flush=True) - results = None + xml_filename = 'refleak-xml.tmp' + result = None dash_R_cleanup(fs, ps, pic, zdc, abcs) support.gc_collect() @@ -103,10 +129,11 @@ def get_pooled_int(value): current = refleak_helper._hunting_for_refleaks refleak_helper._hunting_for_refleaks = True try: - results = test_func() + result = test_func() finally: refleak_helper._hunting_for_refleaks = current + save_support_xml(xml_filename) dash_R_cleanup(fs, ps, pic, zdc, abcs) support.gc_collect() @@ -145,6 +172,8 @@ def get_pooled_int(value): fd_before = fd_after interned_before = interned_after + restore_support_xml(xml_filename) + if not quiet: print(file=sys.stderr) @@ -189,7 +218,7 @@ def check_fd_deltas(deltas): failed = True else: print(' (this is fine)', file=sys.stderr, flush=True) - return (failed, results) + return (failed, result) def dash_R_cleanup(fs, ps, pic, zdc, abcs): diff --git a/Lib/test/libregrtest/results.py b/Lib/test/libregrtest/results.py index 85c82052eae19b..0e28435bc7d629 100644 --- a/Lib/test/libregrtest/results.py +++ b/Lib/test/libregrtest/results.py @@ -18,7 +18,7 @@ class TestResults: - def __init__(self): + def __init__(self) -> None: self.bad: TestList = [] self.good: TestList = [] self.rerun_bad: TestList = [] @@ -38,22 +38,22 @@ def __init__(self): # used by -T with -j self.covered_lines: set[Location] = set() - def is_all_good(self): + def is_all_good(self) -> bool: return (not self.bad and not self.skipped and not 
self.interrupted and not self.worker_bug) - def get_executed(self): + def get_executed(self) -> set[TestName]: return (set(self.good) | set(self.bad) | set(self.skipped) | set(self.resource_denied) | set(self.env_changed) | set(self.run_no_tests)) - def no_tests_run(self): + def no_tests_run(self) -> bool: return not any((self.good, self.bad, self.skipped, self.interrupted, self.env_changed)) - def get_state(self, fail_env_changed): + def get_state(self, fail_env_changed: bool) -> str: state = [] if self.bad: state.append("FAILURE") @@ -204,7 +204,7 @@ def display_result(self, tests: TestTuple, quiet: bool, print_slowest: bool): omitted = set(tests) - self.get_executed() # less important - all_tests.append((omitted, "test", "{} omitted:")) + all_tests.append((sorted(omitted), "test", "{} omitted:")) if not quiet: all_tests.append((self.skipped, "test", "{} skipped:")) all_tests.append((self.resource_denied, "test", "{} skipped (resource denied):")) diff --git a/Lib/test/libregrtest/single.py b/Lib/test/libregrtest/single.py index fc2f2716ad4ce0..adc8f1f455579f 100644 --- a/Lib/test/libregrtest/single.py +++ b/Lib/test/libregrtest/single.py @@ -57,7 +57,10 @@ def _run_suite(suite): result = runner.run(suite) if support.junit_xml_list is not None: - support.junit_xml_list.append(result.get_xml_element()) + import xml.etree.ElementTree as ET + xml_elem = result.get_xml_element() + xml_str = ET.tostring(xml_elem).decode('ascii') + support.junit_xml_list.append(xml_str) if not result.testsRun and not result.skipped and not result.errors: raise support.TestDidNotRun @@ -280,9 +283,7 @@ def _runtest(result: TestResult, runtests: RunTests) -> None: xml_list = support.junit_xml_list if xml_list: - import xml.etree.ElementTree as ET - result.xml_data = [ET.tostring(x).decode('us-ascii') - for x in xml_list] + result.xml_data = xml_list finally: if use_timeout: faulthandler.cancel_dump_traceback_later() diff --git a/Lib/test/pythoninfo.py b/Lib/test/pythoninfo.py index 1db9fb9537f888..d928e002ebda10 100644 --- a/Lib/test/pythoninfo.py +++ b/Lib/test/pythoninfo.py @@ -105,9 +105,13 @@ def collect_sys(info_add): ) copy_attributes(info_add, sys, 'sys.%s', attributes) - call_func(info_add, 'sys.androidapilevel', sys, 'getandroidapilevel') - call_func(info_add, 'sys.windowsversion', sys, 'getwindowsversion') - call_func(info_add, 'sys.getrecursionlimit', sys, 'getrecursionlimit') + for func in ( + '_is_gil_enabled', + 'getandroidapilevel', + 'getrecursionlimit', + 'getwindowsversion', + ): + call_func(info_add, f'sys.{func}', sys, func) encoding = sys.getfilesystemencoding() if hasattr(sys, 'getfilesystemencodeerrors'): diff --git a/Lib/test/seq_tests.py b/Lib/test/seq_tests.py index a41970d8f3f55a..719c9434a16820 100644 --- a/Lib/test/seq_tests.py +++ b/Lib/test/seq_tests.py @@ -426,6 +426,7 @@ def test_pickle(self): self.assertEqual(lst2, lst) self.assertNotEqual(id(lst2), id(lst)) + @support.suppress_immortalization() def test_free_after_iterating(self): support.check_free_after_iterating(self, iter, self.type2test) support.check_free_after_iterating(self, reversed, self.type2test) diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index e2a854663ddb7d..d7a3a549d9cdc2 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -529,11 +529,11 @@ def suppress_immortalization(suppress=True): yield return - old_values = _testinternalcapi.set_immortalize_deferred(False) + _testinternalcapi.suppress_immortalization(True) try: yield finally: - 
_testinternalcapi.set_immortalize_deferred(*old_values) + _testinternalcapi.suppress_immortalization(False) def skip_if_suppress_immortalization(): try: diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py index f6e22d44406d9e..5422c861ffb5c0 100644 --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -3036,6 +3036,23 @@ def test_FunctionDef(self): self.assertEqual(node.name, 'foo') self.assertEqual(node.decorator_list, []) + def test_expr_context(self): + name = ast.Name("x") + self.assertEqual(name.id, "x") + self.assertIsInstance(name.ctx, ast.Load) + + name2 = ast.Name("x", ast.Store()) + self.assertEqual(name2.id, "x") + self.assertIsInstance(name2.ctx, ast.Store) + + name3 = ast.Name("x", ctx=ast.Del()) + self.assertEqual(name3.id, "x") + self.assertIsInstance(name3.ctx, ast.Del) + + with self.assertWarnsRegex(DeprecationWarning, + r"Name\.__init__ missing 1 required positional argument: 'id'"): + name3 = ast.Name() + def test_custom_subclass_with_no_fields(self): class NoInit(ast.AST): pass diff --git a/Lib/test/test_asyncio/test_proactor_events.py b/Lib/test/test_asyncio/test_proactor_events.py index fcaa2f6ade2b76..4b3d551dd7b3a2 100644 --- a/Lib/test/test_asyncio/test_proactor_events.py +++ b/Lib/test/test_asyncio/test_proactor_events.py @@ -1018,9 +1018,9 @@ def setUp(self): self.addCleanup(self.file.close) super().setUp() - def make_socket(self, cleanup=True): + def make_socket(self, cleanup=True, blocking=False): sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - sock.setblocking(False) + sock.setblocking(blocking) sock.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 1024) sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 1024) if cleanup: @@ -1082,6 +1082,11 @@ def test_sock_sendfile_not_regular_file(self): 0, None)) self.assertEqual(self.file.tell(), 0) + def test_blocking_socket(self): + self.loop.set_debug(True) + sock = self.make_socket(blocking=True) + with self.assertRaisesRegex(ValueError, "must be non-blocking"): + self.run_loop(self.loop.sock_sendfile(sock, self.file)) if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_asyncio/test_staggered.py b/Lib/test/test_asyncio/test_staggered.py new file mode 100644 index 00000000000000..e6e32f7dbbbcba --- /dev/null +++ b/Lib/test/test_asyncio/test_staggered.py @@ -0,0 +1,97 @@ +import asyncio +import unittest +from asyncio.staggered import staggered_race + +from test import support + +support.requires_working_socket(module=True) + + +def tearDownModule(): + asyncio.set_event_loop_policy(None) + + +class StaggeredTests(unittest.IsolatedAsyncioTestCase): + async def test_empty(self): + winner, index, excs = await staggered_race( + [], + delay=None, + ) + + self.assertIs(winner, None) + self.assertIs(index, None) + self.assertEqual(excs, []) + + async def test_one_successful(self): + async def coro(index): + return f'Res: {index}' + + winner, index, excs = await staggered_race( + [ + lambda: coro(0), + lambda: coro(1), + ], + delay=None, + ) + + self.assertEqual(winner, 'Res: 0') + self.assertEqual(index, 0) + self.assertEqual(excs, [None]) + + async def test_first_error_second_successful(self): + async def coro(index): + if index == 0: + raise ValueError(index) + return f'Res: {index}' + + winner, index, excs = await staggered_race( + [ + lambda: coro(0), + lambda: coro(1), + ], + delay=None, + ) + + self.assertEqual(winner, 'Res: 1') + self.assertEqual(index, 1) + self.assertEqual(len(excs), 2) + self.assertIsInstance(excs[0], ValueError) + self.assertIs(excs[1], None) + + async def 
test_first_timeout_second_successful(self): + async def coro(index): + if index == 0: + await asyncio.sleep(10) # much bigger than delay + return f'Res: {index}' + + winner, index, excs = await staggered_race( + [ + lambda: coro(0), + lambda: coro(1), + ], + delay=0.1, + ) + + self.assertEqual(winner, 'Res: 1') + self.assertEqual(index, 1) + self.assertEqual(len(excs), 2) + self.assertIsInstance(excs[0], asyncio.CancelledError) + self.assertIs(excs[1], None) + + async def test_none_successful(self): + async def coro(index): + raise ValueError(index) + + winner, index, excs = await staggered_race( + [ + lambda: coro(0), + lambda: coro(1), + ], + delay=None, + ) + + self.assertIs(winner, None) + self.assertIs(index, None) + self.assertEqual(len(excs), 2) + self.assertIsInstance(excs[0], ValueError) + self.assertIsInstance(excs[1], ValueError) diff --git a/Lib/test/test_asyncio/test_unix_events.py b/Lib/test/test_asyncio/test_unix_events.py index 59ef9f5f58cabc..9452213c685851 100644 --- a/Lib/test/test_asyncio/test_unix_events.py +++ b/Lib/test/test_asyncio/test_unix_events.py @@ -6,7 +6,6 @@ import multiprocessing from multiprocessing.util import _cleanup_tests as multiprocessing_cleanup_tests import os -import pathlib import signal import socket import stat @@ -304,20 +303,20 @@ def test_create_unix_server_existing_path_sock(self): self.loop.run_until_complete(srv.wait_closed()) @socket_helper.skip_unless_bind_unix_socket - def test_create_unix_server_pathlib(self): + def test_create_unix_server_pathlike(self): with test_utils.unix_socket_path() as path: - path = pathlib.Path(path) + path = os_helper.FakePath(path) srv_coro = self.loop.create_unix_server(lambda: None, path) srv = self.loop.run_until_complete(srv_coro) srv.close() self.loop.run_until_complete(srv.wait_closed()) - def test_create_unix_connection_pathlib(self): + def test_create_unix_connection_pathlike(self): with test_utils.unix_socket_path() as path: - path = pathlib.Path(path) + path = os_helper.FakePath(path) coro = self.loop.create_unix_connection(lambda: None, path) with self.assertRaises(FileNotFoundError): - # If pathlib.Path wasn't supported, the exception would be + # If path-like object weren't supported, the exception would be # different. 
                self.loop.run_until_complete(coro)
diff --git a/Lib/test/test_audit.py b/Lib/test/test_audit.py
index e163c7ad25cc7b..321d4f9abce8c7 100644
--- a/Lib/test/test_audit.py
+++ b/Lib/test/test_audit.py
@@ -291,6 +291,20 @@ def test_sys_monitoring_register_callback(self):
 
         self.assertEqual(actual, expected)
 
+    def test_winapi_createnamedpipe(self):
+        winapi = import_helper.import_module("_winapi")
+
+        pipe_name = r"\\.\pipe\LOCAL\test_winapi_createnamed_pipe"
+        returncode, events, stderr = self.run_python("test_winapi_createnamedpipe", pipe_name)
+        if returncode:
+            self.fail(stderr)
+
+        if support.verbose:
+            print(*events, sep='\n')
+        actual = [(ev[0], ev[2]) for ev in events]
+        expected = [("_winapi.CreateNamedPipe", f"({pipe_name!r}, 3, 8)")]
+
+        self.assertEqual(actual, expected)
 
 
 if __name__ == "__main__":
     unittest.main()
diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py
index 230789f29ff788..034c83a19fd085 100644
--- a/Lib/test/test_builtin.py
+++ b/Lib/test/test_builtin.py
@@ -2139,6 +2139,24 @@ def test_warning_notimplemented(self):
         with self.assertWarns(DeprecationWarning):
             self.assertFalse(not NotImplemented)
 
+    def test_singleton_attribute_access(self):
+        for singleton in (NotImplemented, Ellipsis):
+            with self.subTest(singleton):
+                self.assertIs(type(singleton), singleton.__class__)
+                self.assertIs(type(singleton).__class__, type)
+
+                # Missing instance attributes:
+                with self.assertRaises(AttributeError):
+                    singleton.prop = 1
+                with self.assertRaises(AttributeError):
+                    singleton.prop
+
+                # Missing class attributes:
+                with self.assertRaises(TypeError):
+                    type(singleton).prop = 1
+                with self.assertRaises(AttributeError):
+                    type(singleton).prop
+
 
 class TestBreakpoint(unittest.TestCase):
     def setUp(self):
diff --git a/Lib/test/test_capi/test_exceptions.py b/Lib/test/test_capi/test_exceptions.py
index 1d158e3586e98d..c475b6d78d0c56 100644
--- a/Lib/test/test_capi/test_exceptions.py
+++ b/Lib/test/test_capi/test_exceptions.py
@@ -3,11 +3,12 @@
 import re
 import sys
 import unittest
+import textwrap
 
 from test import support
 from test.support import import_helper
 from test.support.os_helper import TESTFN, TESTFN_UNDECODABLE
-from test.support.script_helper import assert_python_failure
+from test.support.script_helper import assert_python_failure, assert_python_ok
 from test.support.testcase import ExceptionIsLikeMixin
 
 from .test_misc import decode_stderr
@@ -68,6 +69,47 @@ def test_exc_info(self):
         else:
             self.assertTrue(False)
 
+    def test_warn_with_stacklevel(self):
+        code = textwrap.dedent('''\
+            import _testcapi
+
+            def foo():
+                _testcapi.function_set_warning()
+
+            foo()  # line 6
+
+
+            foo()  # line 9
+        ''')
+        proc = assert_python_ok("-c", code)
+        warnings = proc.err.splitlines()
+        self.assertEqual(warnings, [
+            b'<string>:6: RuntimeWarning: Testing PyErr_WarnEx',
+            b'  foo()  # line 6',
+            b'<string>:9: RuntimeWarning: Testing PyErr_WarnEx',
+            b'  foo()  # line 9',
+        ])
+
+    def test_warn_during_finalization(self):
+        code = textwrap.dedent('''\
+            import _testcapi
+
+            class Foo:
+                def foo(self):
+                    _testcapi.function_set_warning()
+                def __del__(self):
+                    self.foo()
+
+            ref = Foo()
+        ''')
+        proc = assert_python_ok("-c", code)
+        warnings = proc.err.splitlines()
+        # Due to the finalization of the interpreter, the source will be omitted
+        # because the ``warnings`` module cannot be imported at this time
+        self.assertEqual(warnings, [
+            b'<string>:7: RuntimeWarning: Testing PyErr_WarnEx',
+        ])
+
 
 class Test_FatalError(unittest.TestCase):
 
diff --git a/Lib/test/test_capi/test_getargs.py b/Lib/test/test_capi/test_getargs.py
index e710400f75c235..232aa2a80025dc 100644
--- a/Lib/test/test_capi/test_getargs.py
+++ b/Lib/test/test_capi/test_getargs.py
@@ -4,11 +4,17 @@
 import sys
 from test import support
 from test.support import import_helper
+from test.support import script_helper
 from test.support import warnings_helper
 # Skip this test if the _testcapi module isn't available.
 _testcapi = import_helper.import_module('_testcapi')
 from _testcapi import getargs_keywords, getargs_keyword_only
+try:
+    import _testinternalcapi
+except ImportError:
+    _testinternalcapi = None
+
 
 # > How about the following counterproposal. This also changes some of
 # > the other format codes to be a little more regular.
 # >
@@ -1346,6 +1352,33 @@ def test_nested_tuple(self):
                 "argument 1 must be sequence of length 1, not 0"):
             parse(((),), {}, '(' + f + ')', ['a'])
 
+    @unittest.skipIf(_testinternalcapi is None, 'needs _testinternalcapi')
+    def test_gh_119213(self):
+        rc, out, err = script_helper.assert_python_ok("-c", """if True:
+            from test import support
+            script = '''if True:
+                import _testinternalcapi
+                _testinternalcapi.gh_119213_getargs(spam='eggs')
+                '''
+            config = dict(
+                allow_fork=False,
+                allow_exec=False,
+                allow_threads=True,
+                allow_daemon_threads=False,
+                use_main_obmalloc=False,
+                gil=2,
+                check_multi_interp_extensions=True,
+            )
+            rc = support.run_in_subinterp_with_config(script, **config)
+            assert rc == 0
+
+            # The crash is different if the interpreter was not destroyed first.
+            #interpid = _testinternalcapi.create_interpreter()
+            #rc = _testinternalcapi.exec_interpreter(interpid, script)
+            #assert rc == 0
+            """)
+        self.assertEqual(rc, 0)
+
 
 if __name__ == "__main__":
     unittest.main()
diff --git a/Lib/test/test_capi/test_misc.py b/Lib/test/test_capi/test_misc.py
index 020e8493e57c0c..f3d16e4a2fc92a 100644
--- a/Lib/test/test_capi/test_misc.py
+++ b/Lib/test/test_capi/test_misc.py
@@ -26,7 +26,8 @@
 from test.support import threading_helper
 from test.support import warnings_helper
 from test.support import requires_limited_api
-from test.support import requires_gil_enabled, expected_failure_if_gil_disabled
+from test.support import suppress_immortalization
+from test.support import expected_failure_if_gil_disabled
 from test.support import Py_GIL_DISABLED
 from test.support.script_helper import assert_python_failure, assert_python_ok, run_python_until_end
 try:
@@ -481,6 +482,7 @@ def test_heap_ctype_doc_and_text_signature(self):
     def test_null_type_doc(self):
         self.assertEqual(_testcapi.NullTpDocType.__doc__, None)
 
+    @suppress_immortalization()
     def test_subclass_of_heap_gc_ctype_with_tpdealloc_decrefs_once(self):
         class HeapGcCTypeSubclass(_testcapi.HeapGcCType):
             def __init__(self):
@@ -498,6 +500,7 @@ def __init__(self):
         del subclass_instance
         self.assertEqual(type_refcnt - 1, sys.getrefcount(HeapGcCTypeSubclass))
 
+    @suppress_immortalization()
    def test_subclass_of_heap_gc_ctype_with_del_modifying_dunder_class_only_decrefs_once(self):
         class A(_testcapi.HeapGcCType):
             def __init__(self):
@@ -2885,6 +2888,22 @@ def callback():
         t.start()
         t.join()
 
+    @threading_helper.reap_threads
+    @threading_helper.requires_working_threading()
+    def test_thread_gilstate_in_clear(self):
+        # See https://github.com/python/cpython/issues/119585
+        class C:
+            def __del__(self):
+                _testcapi.gilstate_ensure_release()
+
+        # Thread-local variables are destroyed in `PyThreadState_Clear()`.
+ local_var = threading.local() + + def callback(): + local_var.x = C() + + _testcapi._test_thread_state(callback) + @threading_helper.reap_threads @threading_helper.requires_working_threading() def test_gilstate_ensure_no_deadlock(self): diff --git a/Lib/test/test_compileall.py b/Lib/test/test_compileall.py index bf0fd051672db4..812ff5e7f84461 100644 --- a/Lib/test/test_compileall.py +++ b/Lib/test/test_compileall.py @@ -4,7 +4,6 @@ import importlib.util import io import os -import pathlib import py_compile import shutil import struct @@ -31,6 +30,7 @@ from test.support import script_helper from test.test_py_compile import without_source_date_epoch from test.test_py_compile import SourceDateEpochTestMeta +from test.support.os_helper import FakePath def get_pyc(script, opt): @@ -156,28 +156,28 @@ def test_compile_file_pathlike(self): self.assertFalse(os.path.isfile(self.bc_path)) # we should also test the output with support.captured_stdout() as stdout: - self.assertTrue(compileall.compile_file(pathlib.Path(self.source_path))) + self.assertTrue(compileall.compile_file(FakePath(self.source_path))) self.assertRegex(stdout.getvalue(), r'Compiling ([^WindowsPath|PosixPath].*)') self.assertTrue(os.path.isfile(self.bc_path)) def test_compile_file_pathlike_ddir(self): self.assertFalse(os.path.isfile(self.bc_path)) - self.assertTrue(compileall.compile_file(pathlib.Path(self.source_path), - ddir=pathlib.Path('ddir_path'), + self.assertTrue(compileall.compile_file(FakePath(self.source_path), + ddir=FakePath('ddir_path'), quiet=2)) self.assertTrue(os.path.isfile(self.bc_path)) def test_compile_file_pathlike_stripdir(self): self.assertFalse(os.path.isfile(self.bc_path)) - self.assertTrue(compileall.compile_file(pathlib.Path(self.source_path), - stripdir=pathlib.Path('stripdir_path'), + self.assertTrue(compileall.compile_file(FakePath(self.source_path), + stripdir=FakePath('stripdir_path'), quiet=2)) self.assertTrue(os.path.isfile(self.bc_path)) def test_compile_file_pathlike_prependdir(self): self.assertFalse(os.path.isfile(self.bc_path)) - self.assertTrue(compileall.compile_file(pathlib.Path(self.source_path), - prependdir=pathlib.Path('prependdir_path'), + self.assertTrue(compileall.compile_file(FakePath(self.source_path), + prependdir=FakePath('prependdir_path'), quiet=2)) self.assertTrue(os.path.isfile(self.bc_path)) @@ -228,22 +228,22 @@ def test_optimize(self): def test_compile_dir_pathlike(self): self.assertFalse(os.path.isfile(self.bc_path)) with support.captured_stdout() as stdout: - compileall.compile_dir(pathlib.Path(self.directory)) + compileall.compile_dir(FakePath(self.directory)) line = stdout.getvalue().splitlines()[0] self.assertRegex(line, r'Listing ([^WindowsPath|PosixPath].*)') self.assertTrue(os.path.isfile(self.bc_path)) def test_compile_dir_pathlike_stripdir(self): self.assertFalse(os.path.isfile(self.bc_path)) - self.assertTrue(compileall.compile_dir(pathlib.Path(self.directory), - stripdir=pathlib.Path('stripdir_path'), + self.assertTrue(compileall.compile_dir(FakePath(self.directory), + stripdir=FakePath('stripdir_path'), quiet=2)) self.assertTrue(os.path.isfile(self.bc_path)) def test_compile_dir_pathlike_prependdir(self): self.assertFalse(os.path.isfile(self.bc_path)) - self.assertTrue(compileall.compile_dir(pathlib.Path(self.directory), - prependdir=pathlib.Path('prependdir_path'), + self.assertTrue(compileall.compile_dir(FakePath(self.directory), + prependdir=FakePath('prependdir_path'), quiet=2)) self.assertTrue(os.path.isfile(self.bc_path)) diff --git 
a/Lib/test/test_configparser.py b/Lib/test/test_configparser.py index fe09472db89cd2..a934e493a76391 100644 --- a/Lib/test/test_configparser.py +++ b/Lib/test/test_configparser.py @@ -2,7 +2,6 @@ import configparser import io import os -import pathlib import textwrap import unittest @@ -745,12 +744,12 @@ def test_read_returns_file_list(self): self.assertEqual(cf.get("Foo Bar", "foo"), "newbar") # check when we pass only a Path object: cf = self.newconfig() - parsed_files = cf.read(pathlib.Path(file1), encoding="utf-8") + parsed_files = cf.read(os_helper.FakePath(file1), encoding="utf-8") self.assertEqual(parsed_files, [file1]) self.assertEqual(cf.get("Foo Bar", "foo"), "newbar") # check when we passed both a filename and a Path object: cf = self.newconfig() - parsed_files = cf.read([pathlib.Path(file1), file1], encoding="utf-8") + parsed_files = cf.read([os_helper.FakePath(file1), file1], encoding="utf-8") self.assertEqual(parsed_files, [file1, file1]) self.assertEqual(cf.get("Foo Bar", "foo"), "newbar") # check when we pass only missing files: diff --git a/Lib/test/test_ctypes/test_loading.py b/Lib/test/test_ctypes/test_loading.py index b25e81b65cf103..fc1eecb77e17e3 100644 --- a/Lib/test/test_ctypes/test_loading.py +++ b/Lib/test/test_ctypes/test_loading.py @@ -42,10 +42,7 @@ def test_load(self): self.skipTest('could not find library to load') CDLL(test_lib) CDLL(os.path.basename(test_lib)) - class CTypesTestPathLikeCls: - def __fspath__(self): - return test_lib - CDLL(CTypesTestPathLikeCls()) + CDLL(os_helper.FakePath(test_lib)) self.assertRaises(OSError, CDLL, self.unknowndll) def test_load_version(self): diff --git a/Lib/test/test_dataclasses/__init__.py b/Lib/test/test_dataclasses/__init__.py index 832e5672c77d0d..ffb8bbe75c504f 100644 --- a/Lib/test/test_dataclasses/__init__.py +++ b/Lib/test/test_dataclasses/__init__.py @@ -1317,6 +1317,29 @@ def __post_init__(self, init_base, init_derived): c = C(10, 11, 50, 51) self.assertEqual(vars(c), {'x': 21, 'y': 101}) + def test_init_var_name_shadowing(self): + # Because dataclasses rely exclusively on `__annotations__` for + # handling InitVar and `__annotations__` preserves shadowed definitions, + # you can actually shadow an InitVar with a method or property. + # + # This only works when there is no default value; `dataclasses` uses the + # actual name (which will be bound to the shadowing method) for default + # values. + @dataclass + class C: + shadowed: InitVar[int] + _shadowed: int = field(init=False) + + def __post_init__(self, shadowed): + self._shadowed = shadowed * 2 + + @property + def shadowed(self): + return self._shadowed * 3 + + c = C(5) + self.assertEqual(c.shadowed, 30) + def test_default_factory(self): # Test a factory that returns a new list. 
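The InitVar-shadowing test works because class annotations are recorded separately from the class namespace: rebinding the annotated name to a property later leaves the __annotations__ entry intact, and that entry is all dataclasses looks at when there is no default value. A standalone sketch of that behaviour (Demo is an illustrative name, not part of the patch):

    class Demo:
        shadowed: int              # recorded in Demo.__annotations__

        @property
        def shadowed(self):        # rebinding the name does not touch the annotation
            return 42

    print(Demo.__annotations__)    # {'shadowed': <class 'int'>}
    print(Demo().shadowed)         # 42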
@dataclass @@ -1524,6 +1547,24 @@ class A(types.GenericAlias): self.assertTrue(is_dataclass(type(a))) self.assertTrue(is_dataclass(a)) + def test_is_dataclass_inheritance(self): + @dataclass + class X: + y: int + + class Z(X): + pass + + self.assertTrue(is_dataclass(X), "X should be a dataclass") + self.assertTrue( + is_dataclass(Z), + "Z should be a dataclass because it inherits from X", + ) + z_instance = Z(y=5) + self.assertTrue( + is_dataclass(z_instance), + "z_instance should be a dataclass because it is an instance of Z", + ) def test_helper_fields_with_class_instance(self): # Check that we can call fields() on either a class or instance, @@ -3515,8 +3556,114 @@ class A: class B(A): pass + self.assertEqual(B.__slots__, ()) B() + def test_dataclass_derived_generic(self): + T = typing.TypeVar('T') + + @dataclass(slots=True, weakref_slot=True) + class A(typing.Generic[T]): + pass + self.assertEqual(A.__slots__, ('__weakref__',)) + self.assertTrue(A.__weakref__) + A() + + @dataclass(slots=True, weakref_slot=True) + class B[T2]: + pass + self.assertEqual(B.__slots__, ('__weakref__',)) + self.assertTrue(B.__weakref__) + B() + + def test_dataclass_derived_generic_from_base(self): + T = typing.TypeVar('T') + + class RawBase: ... + + @dataclass(slots=True, weakref_slot=True) + class C1(typing.Generic[T], RawBase): + pass + self.assertEqual(C1.__slots__, ()) + self.assertTrue(C1.__weakref__) + C1() + @dataclass(slots=True, weakref_slot=True) + class C2(RawBase, typing.Generic[T]): + pass + self.assertEqual(C2.__slots__, ()) + self.assertTrue(C2.__weakref__) + C2() + + @dataclass(slots=True, weakref_slot=True) + class D[T2](RawBase): + pass + self.assertEqual(D.__slots__, ()) + self.assertTrue(D.__weakref__) + D() + + def test_dataclass_derived_generic_from_slotted_base(self): + T = typing.TypeVar('T') + + class WithSlots: + __slots__ = ('a', 'b') + + @dataclass(slots=True, weakref_slot=True) + class E1(WithSlots, Generic[T]): + pass + self.assertEqual(E1.__slots__, ('__weakref__',)) + self.assertTrue(E1.__weakref__) + E1() + @dataclass(slots=True, weakref_slot=True) + class E2(Generic[T], WithSlots): + pass + self.assertEqual(E2.__slots__, ('__weakref__',)) + self.assertTrue(E2.__weakref__) + E2() + + @dataclass(slots=True, weakref_slot=True) + class F[T2](WithSlots): + pass + self.assertEqual(F.__slots__, ('__weakref__',)) + self.assertTrue(F.__weakref__) + F() + + def test_dataclass_derived_generic_from_slotted_base(self): + T = typing.TypeVar('T') + + class WithWeakrefSlot: + __slots__ = ('__weakref__',) + + @dataclass(slots=True, weakref_slot=True) + class G1(WithWeakrefSlot, Generic[T]): + pass + self.assertEqual(G1.__slots__, ()) + self.assertTrue(G1.__weakref__) + G1() + @dataclass(slots=True, weakref_slot=True) + class G2(Generic[T], WithWeakrefSlot): + pass + self.assertEqual(G2.__slots__, ()) + self.assertTrue(G2.__weakref__) + G2() + + @dataclass(slots=True, weakref_slot=True) + class H[T2](WithWeakrefSlot): + pass + self.assertEqual(H.__slots__, ()) + self.assertTrue(H.__weakref__) + H() + + def test_dataclass_slot_dict(self): + class WithDictSlot: + __slots__ = ('__dict__',) + + @dataclass(slots=True) + class A(WithDictSlot): ... 
+ + self.assertEqual(A.__slots__, ()) + self.assertEqual(A().__dict__, {}) + A() + class TestDescriptors(unittest.TestCase): def test_set_name(self): diff --git a/Lib/test/test_decimal.py b/Lib/test/test_decimal.py index e927e24b582a5d..46755107de0102 100644 --- a/Lib/test/test_decimal.py +++ b/Lib/test/test_decimal.py @@ -5892,13 +5892,17 @@ def load_tests(loader, tests, pattern): if TODO_TESTS is None: from doctest import DocTestSuite, IGNORE_EXCEPTION_DETAIL + orig_context = orig_sys_decimal.getcontext().copy() for mod in C, P: if not mod: continue def setUp(slf, mod=mod): sys.modules['decimal'] = mod - def tearDown(slf): + init(mod) + def tearDown(slf, mod=mod): sys.modules['decimal'] = orig_sys_decimal + mod.setcontext(ORIGINAL_CONTEXT[mod].copy()) + orig_sys_decimal.setcontext(orig_context.copy()) optionflags = IGNORE_EXCEPTION_DETAIL if mod is C else 0 sys.modules['decimal'] = mod tests.addTest(DocTestSuite(mod, setUp=setUp, tearDown=tearDown, @@ -5913,8 +5917,8 @@ def setUpModule(): TEST_ALL = ARITH if ARITH is not None else is_resource_enabled('decimal') def tearDownModule(): - if C: C.setcontext(ORIGINAL_CONTEXT[C]) - P.setcontext(ORIGINAL_CONTEXT[P]) + if C: C.setcontext(ORIGINAL_CONTEXT[C].copy()) + P.setcontext(ORIGINAL_CONTEXT[P].copy()) if not C: warnings.warn('C tests skipped: no module named _decimal.', UserWarning) diff --git a/Lib/test/test_descr.py b/Lib/test/test_descr.py index 18144c8cbb2f0a..c3f292467a6738 100644 --- a/Lib/test/test_descr.py +++ b/Lib/test/test_descr.py @@ -5014,6 +5014,7 @@ def __new__(cls): cls.lst = [2**i for i in range(10000)] X.descr + @support.suppress_immortalization() def test_remove_subclass(self): # bpo-46417: when the last subclass of a type is deleted, # remove_subclass() clears the internal dictionary of subclasses: diff --git a/Lib/test/test_email/test__header_value_parser.py b/Lib/test/test_email/test__header_value_parser.py index 56a1e3a3de5aa2..5413319a414a62 100644 --- a/Lib/test/test_email/test__header_value_parser.py +++ b/Lib/test/test_email/test__header_value_parser.py @@ -3077,9 +3077,17 @@ def test_address_list_with_unicode_names_in_quotes(self): ' =?utf-8?q?bei=C3=9Ft_bei=C3=9Ft?= \n') def test_address_list_with_list_separator_after_fold(self): - to = '0123456789' * 8 + '@foo, ä ' + a = 'x' * 66 + '@example.com' + to = f'{a}, "Hübsch Kaktus" ' self._test(parser.get_address_list(to)[0], - '0123456789' * 8 + '@foo,\n =?utf-8?q?=C3=A4?= \n') + f'{a},\n =?utf-8?q?H=C3=BCbsch?= Kaktus \n') + + a = '.' * 79 + to = f'"{a}" , "Hübsch Kaktus" ' + self._test(parser.get_address_list(to)[0], + f'{a}\n' + ' , =?utf-8?q?H=C3=BCbsch?= Kaktus ' + '\n') # XXX Need tests with comments on various sides of a unicode token, # and with unicode tokens in the comments. 
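Both email test files touched here pin down how non-ASCII header content gets folded into RFC 2047 encoded words. A quick way to observe that folding outside the test suite (the subject text is the one asserted in the test_generator hunk below; the exact encoded-word splits are what this patch adjusts):

    from email.message import EmailMessage

    msg = EmailMessage()
    msg['Subject'] = ('Уведомление о принятии в работу обращения'
                      ' для подключения услуги')
    # Serialising with the default policy folds the header into
    # =?utf-8?b?...?= continuation lines, which is the output the
    # generator tests compare against byte for byte.
    print(msg.as_bytes().decode('ascii'))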
Spaces inside the quotes diff --git a/Lib/test/test_email/test_generator.py b/Lib/test/test_email/test_generator.py index 3ebcb684d006d0..bfff1051262079 100644 --- a/Lib/test/test_email/test_generator.py +++ b/Lib/test/test_email/test_generator.py @@ -281,6 +281,41 @@ class TestBytesGenerator(TestGeneratorBase, TestEmailBase): ioclass = io.BytesIO typ = lambda self, x: x.encode('ascii') + def test_defaults_handle_spaces_between_encoded_words_when_folded(self): + source = ("Уведомление о принятии в работу обращения для" + " подключения услуги") + expected = ('Subject: =?utf-8?b?0KPQstC10LTQvtC80LvQtdC90LjQtSDQviDQv9GA0LjQvdGP0YLQuNC4?=\n' + ' =?utf-8?b?INCyINGA0LDQsdC+0YLRgyDQvtCx0YDQsNGJ0LXQvdC40Y8g0LTQu9GPINC/0L4=?=\n' + ' =?utf-8?b?0LTQutC70Y7Rh9C10L3QuNGPINGD0YHQu9GD0LPQuA==?=\n\n').encode('ascii') + msg = EmailMessage() + msg['Subject'] = source + s = io.BytesIO() + g = BytesGenerator(s) + g.flatten(msg) + self.assertEqual(s.getvalue(), expected) + + def test_defaults_handle_spaces_at_start_of_subject(self): + source = " Уведомление" + expected = b"Subject: =?utf-8?b?0KPQstC10LTQvtC80LvQtdC90LjQtQ==?=\n\n" + msg = EmailMessage() + msg['Subject'] = source + s = io.BytesIO() + g = BytesGenerator(s) + g.flatten(msg) + self.assertEqual(s.getvalue(), expected) + + def test_defaults_handle_spaces_at_start_of_continuation_line(self): + source = " ф ффффффффффффффффффф ф ф" + expected = (b"Subject: " + b"=?utf-8?b?0YQg0YTRhNGE0YTRhNGE0YTRhNGE0YTRhNGE0YTRhNGE0YTRhNGE0YQ=?=\n" + b" =?utf-8?b?INGEINGE?=\n\n") + msg = EmailMessage() + msg['Subject'] = source + s = io.BytesIO() + g = BytesGenerator(s) + g.flatten(msg) + self.assertEqual(s.getvalue(), expected) + def test_cte_type_7bit_handles_unknown_8bit(self): source = ("Subject: Maintenant je vous présente mon " "collègue\n\n").encode('utf-8') diff --git a/Lib/test/test_email/test_headerregistry.py b/Lib/test/test_email/test_headerregistry.py index bb7ca8dfd8c52c..5a608a033c7e54 100644 --- a/Lib/test/test_email/test_headerregistry.py +++ b/Lib/test/test_email/test_headerregistry.py @@ -7,6 +7,7 @@ from test.test_email import TestEmailBase, parameterize from email import headerregistry from email.headerregistry import Address, Group +from email.header import decode_header from test.support import ALWAYS_EQ @@ -1648,7 +1649,7 @@ def test_address_display_names(self): 'Lôrem ipsum dôlôr sit amet, cônsectetuer adipiscing. ' 'Suspendisse pôtenti. Aliquam nibh. 
Suspendisse pôtenti.', '=?utf-8?q?L=C3=B4rem_ipsum_d=C3=B4l=C3=B4r_sit_amet=2C_c' - '=C3=B4nsectetuer?=\n =?utf-8?q?adipiscing=2E_Suspendisse' + '=C3=B4nsectetuer?=\n =?utf-8?q?_adipiscing=2E_Suspendisse' '_p=C3=B4tenti=2E_Aliquam_nibh=2E?=\n Suspendisse =?utf-8' '?q?p=C3=B4tenti=2E?=', ), diff --git a/Lib/test/test_fileinput.py b/Lib/test/test_fileinput.py index b3ad41d2588c4c..b340ef7ed1621c 100644 --- a/Lib/test/test_fileinput.py +++ b/Lib/test/test_fileinput.py @@ -23,10 +23,9 @@ from io import BytesIO, StringIO from fileinput import FileInput, hook_encoded -from pathlib import Path from test.support import verbose -from test.support.os_helper import TESTFN +from test.support.os_helper import TESTFN, FakePath from test.support.os_helper import unlink as safe_unlink from test.support import os_helper from test import support @@ -478,23 +477,23 @@ def test_iteration_buffering(self): self.assertRaises(StopIteration, next, fi) self.assertEqual(src.linesread, []) - def test_pathlib_file(self): - t1 = Path(self.writeTmp("Pathlib file.")) + def test_pathlike_file(self): + t1 = FakePath(self.writeTmp("Path-like file.")) with FileInput(t1, encoding="utf-8") as fi: line = fi.readline() - self.assertEqual(line, 'Pathlib file.') + self.assertEqual(line, 'Path-like file.') self.assertEqual(fi.lineno(), 1) self.assertEqual(fi.filelineno(), 1) self.assertEqual(fi.filename(), os.fspath(t1)) - def test_pathlib_file_inplace(self): - t1 = Path(self.writeTmp('Pathlib file.')) + def test_pathlike_file_inplace(self): + t1 = FakePath(self.writeTmp('Path-like file.')) with FileInput(t1, inplace=True, encoding="utf-8") as fi: line = fi.readline() - self.assertEqual(line, 'Pathlib file.') + self.assertEqual(line, 'Path-like file.') print('Modified %s' % line) with open(t1, encoding="utf-8") as f: - self.assertEqual(f.read(), 'Modified Pathlib file.\n') + self.assertEqual(f.read(), 'Modified Path-like file.\n') class MockFileInput: diff --git a/Lib/test/test_fractions.py b/Lib/test/test_fractions.py index b45bd098a36684..3a714c64278847 100644 --- a/Lib/test/test_fractions.py +++ b/Lib/test/test_fractions.py @@ -1,5 +1,6 @@ """Tests for Lib/fractions.py.""" +import cmath from decimal import Decimal from test.support import requires_IEEE_754 import math @@ -91,6 +92,197 @@ class DummyFraction(fractions.Fraction): def _components(r): return (r.numerator, r.denominator) +def typed_approx_eq(a, b): + return type(a) == type(b) and (a == b or math.isclose(a, b)) + +class Symbolic: + """Simple non-numeric class for testing mixed arithmetic. + It is not Integral, Rational, Real or Complex, and cannot be conveted + to int, float or complex. but it supports some arithmetic operations. 
+ """ + def __init__(self, value): + self.value = value + def __mul__(self, other): + if isinstance(other, F): + return NotImplemented + return self.__class__(f'{self} * {other}') + def __rmul__(self, other): + return self.__class__(f'{other} * {self}') + def __truediv__(self, other): + if isinstance(other, F): + return NotImplemented + return self.__class__(f'{self} / {other}') + def __rtruediv__(self, other): + return self.__class__(f'{other} / {self}') + def __mod__(self, other): + if isinstance(other, F): + return NotImplemented + return self.__class__(f'{self} % {other}') + def __rmod__(self, other): + return self.__class__(f'{other} % {self}') + def __pow__(self, other): + if isinstance(other, F): + return NotImplemented + return self.__class__(f'{self} ** {other}') + def __rpow__(self, other): + return self.__class__(f'{other} ** {self}') + def __eq__(self, other): + if other.__class__ != self.__class__: + return NotImplemented + return self.value == other.value + def __str__(self): + return f'{self.value}' + def __repr__(self): + return f'{self.__class__.__name__}({self.value!r})' + +class SymbolicReal(Symbolic): + pass +numbers.Real.register(SymbolicReal) + +class SymbolicComplex(Symbolic): + pass +numbers.Complex.register(SymbolicComplex) + +class Rat: + """Simple Rational class for testing mixed arithmetic.""" + def __init__(self, n, d): + self.numerator = n + self.denominator = d + def __mul__(self, other): + if isinstance(other, F): + return NotImplemented + return self.__class__(self.numerator * other.numerator, + self.denominator * other.denominator) + def __rmul__(self, other): + return self.__class__(other.numerator * self.numerator, + other.denominator * self.denominator) + def __truediv__(self, other): + if isinstance(other, F): + return NotImplemented + return self.__class__(self.numerator * other.denominator, + self.denominator * other.numerator) + def __rtruediv__(self, other): + return self.__class__(other.numerator * self.denominator, + other.denominator * self.numerator) + def __mod__(self, other): + if isinstance(other, F): + return NotImplemented + d = self.denominator * other.numerator + return self.__class__(self.numerator * other.denominator % d, d) + def __rmod__(self, other): + d = other.denominator * self.numerator + return self.__class__(other.numerator * self.denominator % d, d) + + return self.__class__(other.numerator / self.numerator, + other.denominator / self.denominator) + def __pow__(self, other): + if isinstance(other, F): + return NotImplemented + return self.__class__(self.numerator ** other, + self.denominator ** other) + def __float__(self): + return self.numerator / self.denominator + def __eq__(self, other): + if self.__class__ != other.__class__: + return NotImplemented + return (typed_approx_eq(self.numerator, other.numerator) and + typed_approx_eq(self.denominator, other.denominator)) + def __repr__(self): + return f'{self.__class__.__name__}({self.numerator!r}, {self.denominator!r})' +numbers.Rational.register(Rat) + +class Root: + """Simple Real class for testing mixed arithmetic.""" + def __init__(self, v, n=F(2)): + self.base = v + self.degree = n + def __mul__(self, other): + if isinstance(other, F): + return NotImplemented + return self.__class__(self.base * other**self.degree, self.degree) + def __rmul__(self, other): + return self.__class__(other**self.degree * self.base, self.degree) + def __truediv__(self, other): + if isinstance(other, F): + return NotImplemented + return self.__class__(self.base / other**self.degree, 
self.degree) + def __rtruediv__(self, other): + return self.__class__(other**self.degree / self.base, self.degree) + def __pow__(self, other): + if isinstance(other, F): + return NotImplemented + return self.__class__(self.base, self.degree / other) + def __float__(self): + return float(self.base) ** (1 / float(self.degree)) + def __eq__(self, other): + if self.__class__ != other.__class__: + return NotImplemented + return typed_approx_eq(self.base, other.base) and typed_approx_eq(self.degree, other.degree) + def __repr__(self): + return f'{self.__class__.__name__}({self.base!r}, {self.degree!r})' +numbers.Real.register(Root) + +class Polar: + """Simple Complex class for testing mixed arithmetic.""" + def __init__(self, r, phi): + self.r = r + self.phi = phi + def __mul__(self, other): + if isinstance(other, F): + return NotImplemented + return self.__class__(self.r * other, self.phi) + def __rmul__(self, other): + return self.__class__(other * self.r, self.phi) + def __truediv__(self, other): + if isinstance(other, F): + return NotImplemented + return self.__class__(self.r / other, self.phi) + def __rtruediv__(self, other): + return self.__class__(other / self.r, -self.phi) + def __pow__(self, other): + if isinstance(other, F): + return NotImplemented + return self.__class__(self.r ** other, self.phi * other) + def __eq__(self, other): + if self.__class__ != other.__class__: + return NotImplemented + return typed_approx_eq(self.r, other.r) and typed_approx_eq(self.phi, other.phi) + def __repr__(self): + return f'{self.__class__.__name__}({self.r!r}, {self.phi!r})' +numbers.Complex.register(Polar) + +class Rect: + """Other simple Complex class for testing mixed arithmetic.""" + def __init__(self, x, y): + self.x = x + self.y = y + def __mul__(self, other): + if isinstance(other, F): + return NotImplemented + return self.__class__(self.x * other, self.y * other) + def __rmul__(self, other): + return self.__class__(other * self.x, other * self.y) + def __truediv__(self, other): + if isinstance(other, F): + return NotImplemented + return self.__class__(self.x / other, self.y / other) + def __rtruediv__(self, other): + r = self.x * self.x + self.y * self.y + return self.__class__(other * (self.x / r), other * (self.y / r)) + def __rpow__(self, other): + return Polar(other ** self.x, math.log(other) * self.y) + def __complex__(self): + return complex(self.x, self.y) + def __eq__(self, other): + if self.__class__ != other.__class__: + return NotImplemented + return typed_approx_eq(self.x, other.x) and typed_approx_eq(self.y, other.y) + def __repr__(self): + return f'{self.__class__.__name__}({self.x!r}, {self.y!r})' +numbers.Complex.register(Rect) + +class RectComplex(Rect, complex): + pass class FractionTest(unittest.TestCase): @@ -593,6 +785,7 @@ def testMixedArithmetic(self): self.assertTypedEquals(0.9, 1.0 - F(1, 10)) self.assertTypedEquals(0.9 + 0j, (1.0 + 0j) - F(1, 10)) + def testMixedMultiplication(self): self.assertTypedEquals(F(1, 10), F(1, 10) * 1) self.assertTypedEquals(0.1, F(1, 10) * 1.0) self.assertTypedEquals(0.1 + 0j, F(1, 10) * (1.0 + 0j)) @@ -600,6 +793,29 @@ def testMixedArithmetic(self): self.assertTypedEquals(0.1, 1.0 * F(1, 10)) self.assertTypedEquals(0.1 + 0j, (1.0 + 0j) * F(1, 10)) + self.assertTypedEquals(F(3, 2) * DummyFraction(5, 3), F(5, 2)) + self.assertTypedEquals(DummyFraction(5, 3) * F(3, 2), F(5, 2)) + self.assertTypedEquals(F(3, 2) * Rat(5, 3), Rat(15, 6)) + self.assertTypedEquals(Rat(5, 3) * F(3, 2), F(5, 2)) + + self.assertTypedEquals(F(3, 2) * Root(4), 
Root(F(9, 1))) + self.assertTypedEquals(Root(4) * F(3, 2), 3.0) + self.assertEqual(F(3, 2) * SymbolicReal('X'), SymbolicReal('3/2 * X')) + self.assertRaises(TypeError, operator.mul, SymbolicReal('X'), F(3, 2)) + + self.assertTypedEquals(F(3, 2) * Polar(4, 2), Polar(F(6, 1), 2)) + self.assertTypedEquals(F(3, 2) * Polar(4.0, 2), Polar(6.0, 2)) + self.assertTypedEquals(F(3, 2) * Rect(4, 3), Rect(F(6, 1), F(9, 2))) + self.assertTypedEquals(F(3, 2) * RectComplex(4, 3), RectComplex(6.0+0j, 4.5+0j)) + self.assertRaises(TypeError, operator.mul, Polar(4, 2), F(3, 2)) + self.assertTypedEquals(Rect(4, 3) * F(3, 2), 6.0 + 4.5j) + self.assertEqual(F(3, 2) * SymbolicComplex('X'), SymbolicComplex('3/2 * X')) + self.assertRaises(TypeError, operator.mul, SymbolicComplex('X'), F(3, 2)) + + self.assertEqual(F(3, 2) * Symbolic('X'), Symbolic('3/2 * X')) + self.assertRaises(TypeError, operator.mul, Symbolic('X'), F(3, 2)) + + def testMixedDivision(self): self.assertTypedEquals(F(1, 10), F(1, 10) / 1) self.assertTypedEquals(0.1, F(1, 10) / 1.0) self.assertTypedEquals(0.1 + 0j, F(1, 10) / (1.0 + 0j)) @@ -607,6 +823,28 @@ def testMixedArithmetic(self): self.assertTypedEquals(10.0, 1.0 / F(1, 10)) self.assertTypedEquals(10.0 + 0j, (1.0 + 0j) / F(1, 10)) + self.assertTypedEquals(F(3, 2) / DummyFraction(3, 5), F(5, 2)) + self.assertTypedEquals(DummyFraction(5, 3) / F(2, 3), F(5, 2)) + self.assertTypedEquals(F(3, 2) / Rat(3, 5), Rat(15, 6)) + self.assertTypedEquals(Rat(5, 3) / F(2, 3), F(5, 2)) + + self.assertTypedEquals(F(2, 3) / Root(4), Root(F(1, 9))) + self.assertTypedEquals(Root(4) / F(2, 3), 3.0) + self.assertEqual(F(3, 2) / SymbolicReal('X'), SymbolicReal('3/2 / X')) + self.assertRaises(TypeError, operator.truediv, SymbolicReal('X'), F(3, 2)) + + self.assertTypedEquals(F(3, 2) / Polar(4, 2), Polar(F(3, 8), -2)) + self.assertTypedEquals(F(3, 2) / Polar(4.0, 2), Polar(0.375, -2)) + self.assertTypedEquals(F(3, 2) / Rect(4, 3), Rect(0.24, 0.18)) + self.assertRaises(TypeError, operator.truediv, Polar(4, 2), F(2, 3)) + self.assertTypedEquals(Rect(4, 3) / F(2, 3), 6.0 + 4.5j) + self.assertEqual(F(3, 2) / SymbolicComplex('X'), SymbolicComplex('3/2 / X')) + self.assertRaises(TypeError, operator.truediv, SymbolicComplex('X'), F(3, 2)) + + self.assertEqual(F(3, 2) / Symbolic('X'), Symbolic('3/2 / X')) + self.assertRaises(TypeError, operator.truediv, Symbolic('X'), F(2, 3)) + + def testMixedIntegerDivision(self): self.assertTypedEquals(0, F(1, 10) // 1) self.assertTypedEquals(0.0, F(1, 10) // 1.0) self.assertTypedEquals(10, 1 // F(1, 10)) @@ -631,6 +869,26 @@ def testMixedArithmetic(self): self.assertTypedTupleEquals(divmod(-0.1, float('inf')), divmod(F(-1, 10), float('inf'))) self.assertTypedTupleEquals(divmod(-0.1, float('-inf')), divmod(F(-1, 10), float('-inf'))) + self.assertTypedEquals(F(3, 2) % DummyFraction(3, 5), F(3, 10)) + self.assertTypedEquals(DummyFraction(5, 3) % F(2, 3), F(1, 3)) + self.assertTypedEquals(F(3, 2) % Rat(3, 5), Rat(3, 6)) + self.assertTypedEquals(Rat(5, 3) % F(2, 3), F(1, 3)) + + self.assertRaises(TypeError, operator.mod, F(2, 3), Root(4)) + self.assertTypedEquals(Root(4) % F(3, 2), 0.5) + self.assertEqual(F(3, 2) % SymbolicReal('X'), SymbolicReal('3/2 % X')) + self.assertRaises(TypeError, operator.mod, SymbolicReal('X'), F(3, 2)) + + self.assertRaises(TypeError, operator.mod, F(3, 2), Polar(4, 2)) + self.assertRaises(TypeError, operator.mod, F(3, 2), RectComplex(4, 3)) + self.assertRaises(TypeError, operator.mod, Rect(4, 3), F(2, 3)) + self.assertEqual(F(3, 2) % SymbolicComplex('X'), 
SymbolicComplex('3/2 % X')) + self.assertRaises(TypeError, operator.mod, SymbolicComplex('X'), F(3, 2)) + + self.assertEqual(F(3, 2) % Symbolic('X'), Symbolic('3/2 % X')) + self.assertRaises(TypeError, operator.mod, Symbolic('X'), F(2, 3)) + + def testMixedPower(self): # ** has more interesting conversion rules. self.assertTypedEquals(F(100, 1), F(1, 10) ** -2) self.assertTypedEquals(F(100, 1), F(10, 1) ** 2) @@ -647,6 +905,40 @@ def testMixedArithmetic(self): self.assertRaises(ZeroDivisionError, operator.pow, F(0, 1), -2) + self.assertTypedEquals(F(3, 2) ** Rat(3, 1), F(27, 8)) + self.assertTypedEquals(F(3, 2) ** Rat(-3, 1), F(8, 27)) + self.assertTypedEquals(F(-3, 2) ** Rat(-3, 1), F(-8, 27)) + self.assertTypedEquals(F(9, 4) ** Rat(3, 2), 3.375) + self.assertIsInstance(F(4, 9) ** Rat(-3, 2), float) + self.assertAlmostEqual(F(4, 9) ** Rat(-3, 2), 3.375) + self.assertAlmostEqual(F(-4, 9) ** Rat(-3, 2), 3.375j) + self.assertTypedEquals(Rat(9, 4) ** F(3, 2), 3.375) + self.assertTypedEquals(Rat(3, 2) ** F(3, 1), Rat(27, 8)) + self.assertTypedEquals(Rat(3, 2) ** F(-3, 1), F(8, 27)) + self.assertIsInstance(Rat(4, 9) ** F(-3, 2), float) + self.assertAlmostEqual(Rat(4, 9) ** F(-3, 2), 3.375) + + self.assertTypedEquals(Root(4) ** F(2, 3), Root(4, 3.0)) + self.assertTypedEquals(Root(4) ** F(2, 1), Root(4, F(1))) + self.assertTypedEquals(Root(4) ** F(-2, 1), Root(4, -F(1))) + self.assertTypedEquals(Root(4) ** F(-2, 3), Root(4, -3.0)) + self.assertEqual(F(3, 2) ** SymbolicReal('X'), SymbolicReal('1.5 ** X')) + self.assertEqual(SymbolicReal('X') ** F(3, 2), SymbolicReal('X ** 1.5')) + + self.assertTypedEquals(F(3, 2) ** Rect(2, 0), Polar(2.25, 0.0)) + self.assertTypedEquals(F(1, 1) ** Rect(2, 3), Polar(1.0, 0.0)) + self.assertTypedEquals(F(3, 2) ** RectComplex(2, 0), Polar(2.25, 0.0)) + self.assertTypedEquals(F(1, 1) ** RectComplex(2, 3), Polar(1.0, 0.0)) + self.assertTypedEquals(Polar(4, 2) ** F(3, 2), Polar(8.0, 3.0)) + self.assertTypedEquals(Polar(4, 2) ** F(3, 1), Polar(64, 6)) + self.assertTypedEquals(Polar(4, 2) ** F(-3, 1), Polar(0.015625, -6)) + self.assertTypedEquals(Polar(4, 2) ** F(-3, 2), Polar(0.125, -3.0)) + self.assertEqual(F(3, 2) ** SymbolicComplex('X'), SymbolicComplex('1.5 ** X')) + self.assertEqual(SymbolicComplex('X') ** F(3, 2), SymbolicComplex('X ** 1.5')) + + self.assertEqual(F(3, 2) ** Symbolic('X'), Symbolic('1.5 ** X')) + self.assertEqual(Symbolic('X') ** F(3, 2), Symbolic('X ** 1.5')) + def testMixingWithDecimal(self): # Decimal refuses mixed arithmetic (but not mixed comparisons) self.assertRaises(TypeError, operator.add, diff --git a/Lib/test/test_frame.py b/Lib/test/test_frame.py index 212255374bddd1..aee8d374b22710 100644 --- a/Lib/test/test_frame.py +++ b/Lib/test/test_frame.py @@ -371,6 +371,15 @@ def test_local_objects(self): f_locals['o'] = f_locals['k'] self.assertEqual(o, 'a.b.c') + def test_copy(self): + x = 0 + d = sys._getframe().f_locals + d_copy = d.copy() + self.assertIsInstance(d_copy, dict) + self.assertEqual(d_copy['x'], 0) + d_copy['x'] = 1 + self.assertEqual(x, 0) + def test_update_with_self(self): def f(): f_locals = sys._getframe().f_locals @@ -405,9 +414,6 @@ def test_sizeof(self): def test_unsupport(self): x = 1 d = sys._getframe().f_locals - with self.assertRaises(AttributeError): - d.copy() - with self.assertRaises(TypeError): copy.copy(d) diff --git a/Lib/test/test_free_threading/test_str.py b/Lib/test/test_free_threading/test_str.py new file mode 100644 index 00000000000000..f5e93b7de0aa9b --- /dev/null +++ 
b/Lib/test/test_free_threading/test_str.py @@ -0,0 +1,75 @@ +import sys +import unittest + +from itertools import cycle +from threading import Event, Thread +from unittest import TestCase + +from test.support import threading_helper + +@threading_helper.requires_working_threading() +class TestStr(TestCase): + def test_racing_join_extend(self): + '''Test joining a string being extended by another thread''' + l = [] + ITERS = 100 + READERS = 10 + done_event = Event() + def writer_func(): + for i in range(ITERS): + l.extend(map(str, range(i))) + l.clear() + done_event.set() + def reader_func(): + while not done_event.is_set(): + ''.join(l) + writer = Thread(target=writer_func) + readers = [] + for x in range(READERS): + reader = Thread(target=reader_func) + readers.append(reader) + reader.start() + + writer.start() + writer.join() + for reader in readers: + reader.join() + + def test_racing_join_replace(self): + ''' + Test joining a string of characters being replaced with ephemeral + strings by another thread. + ''' + l = [*'abcdefg'] + MAX_ORDINAL = 1_000 + READERS = 10 + done_event = Event() + + def writer_func(): + for i, c in zip(cycle(range(len(l))), + map(chr, range(128, MAX_ORDINAL))): + l[i] = c + done_event.set() + + def reader_func(): + while not done_event.is_set(): + ''.join(l) + ''.join(l) + ''.join(l) + ''.join(l) + + writer = Thread(target=writer_func) + readers = [] + for x in range(READERS): + reader = Thread(target=reader_func) + readers.append(reader) + reader.start() + + writer.start() + writer.join() + for reader in readers: + reader.join() + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py index 4a9a7313712f60..26701ea8b4daf9 100644 --- a/Lib/test/test_functools.py +++ b/Lib/test/test_functools.py @@ -710,6 +710,14 @@ def wrapper(): self.assertTrue(wrapper.__doc__.startswith('max(')) self.assertEqual(wrapper.__annotations__, {}) + def test_update_type_wrapper(self): + def wrapper(*args): pass + + functools.update_wrapper(wrapper, type) + self.assertEqual(wrapper.__name__, 'type') + self.assertEqual(wrapper.__annotations__, {}) + self.assertEqual(wrapper.__type_params__, ()) + class TestWraps(TestUpdateWrapper): diff --git a/Lib/test/test_future_stmt/test_future.py b/Lib/test/test_future_stmt/test_future.py index 69ae58b0fbcae3..bb31d0a0023fad 100644 --- a/Lib/test/test_future_stmt/test_future.py +++ b/Lib/test/test_future_stmt/test_future.py @@ -171,26 +171,6 @@ def test_ensure_flags_dont_clash(self): } self.assertCountEqual(set(flags.values()), flags.values()) - def test_parserhack(self): - # test that the parser.c::future_hack function works as expected - # Note: although this test must pass, it's not testing the original - # bug as of 2.6 since the with statement is not optional and - # the parser hack disabled. If a new keyword is introduced in - # 2.6, change this to refer to the new future import. 
- try: - exec("from __future__ import print_function; print 0") - except SyntaxError: - pass - else: - self.fail("syntax error didn't occur") - - try: - exec("from __future__ import (print_function); print 0") - except SyntaxError: - pass - else: - self.fail("syntax error didn't occur") - def test_unicode_literals_exec(self): scope = {} exec("from __future__ import unicode_literals; x = ''", {}, scope) diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py index 52681dc18cfb86..906f9884d6792f 100644 --- a/Lib/test/test_gc.py +++ b/Lib/test/test_gc.py @@ -3,7 +3,8 @@ from test import support from test.support import (verbose, refcount_test, cpython_only, requires_subprocess, - requires_gil_enabled) + requires_gil_enabled, suppress_immortalization, + Py_GIL_DISABLED) from test.support.import_helper import import_module from test.support.os_helper import temp_dir, TESTFN, unlink from test.support.script_helper import assert_python_ok, make_script @@ -109,6 +110,7 @@ def test_tuple(self): del l self.assertEqual(gc.collect(), 2) + @suppress_immortalization() def test_class(self): class A: pass @@ -117,6 +119,7 @@ class A: del A self.assertNotEqual(gc.collect(), 0) + @suppress_immortalization() def test_newstyleclass(self): class A(object): pass @@ -133,6 +136,7 @@ class A: del a self.assertNotEqual(gc.collect(), 0) + @suppress_immortalization() def test_newinstance(self): class A(object): pass @@ -219,6 +223,7 @@ class B(object): self.fail("didn't find obj in garbage (finalizer)") gc.garbage.remove(obj) + @suppress_immortalization() def test_function(self): # Tricky: f -> d -> f, code should call d.clear() after the exec to # break the cycle. @@ -561,6 +566,7 @@ def test_get_referents(self): self.assertEqual(gc.get_referents(1, 'a', 4j), []) + @suppress_immortalization() def test_is_tracked(self): # Atomic built-in types are not tracked, user-defined objects and # mutable containers are. @@ -598,7 +604,9 @@ class UserFloatSlots(float): class UserIntSlots(int): __slots__ = () - self.assertTrue(gc.is_tracked(gc)) + if not Py_GIL_DISABLED: + # gh-117783: modules may be immortalized in free-threaded build + self.assertTrue(gc.is_tracked(gc)) self.assertTrue(gc.is_tracked(UserClass)) self.assertTrue(gc.is_tracked(UserClass())) self.assertTrue(gc.is_tracked(UserInt())) @@ -1347,6 +1355,10 @@ def callback(ignored): junk = [] i = 0 detector = GC_Detector() + if Py_GIL_DISABLED: + # The free-threaded build doesn't have multiple generations, so + # just trigger a GC manually. + gc.collect() while not detector.gc_happened: i += 1 if i > 10000: @@ -1415,6 +1427,10 @@ def __del__(self): detector = GC_Detector() junk = [] i = 0 + if Py_GIL_DISABLED: + # The free-threaded build doesn't have multiple generations, so + # just trigger a GC manually. 
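The Py_GIL_DISABLED guards added to these GC tests come from test.support and, roughly speaking, just reflect the build-time configuration of a free-threaded (--disable-gil) interpreter; a sketch of an equivalent check:

    import sysconfig

    # True on free-threaded builds, where the collector has no generational
    # triggers and the tests above fall back to an explicit gc.collect().
    free_threaded = bool(sysconfig.get_config_var('Py_GIL_DISABLED'))
    print('free-threaded build' if free_threaded else 'default (GIL) build')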
+ gc.collect() while not detector.gc_happened: i += 1 if i > 10000: diff --git a/Lib/test/test_genericpath.py b/Lib/test/test_genericpath.py index f407ee3caf154c..bf04b3fecf7057 100644 --- a/Lib/test/test_genericpath.py +++ b/Lib/test/test_genericpath.py @@ -135,6 +135,9 @@ def test_exists(self): self.assertIs(self.pathmodule.exists(filename), False) self.assertIs(self.pathmodule.exists(bfilename), False) + self.assertIs(self.pathmodule.lexists(filename), False) + self.assertIs(self.pathmodule.lexists(bfilename), False) + create_file(filename) self.assertIs(self.pathmodule.exists(filename), True) @@ -145,14 +148,13 @@ def test_exists(self): self.assertIs(self.pathmodule.exists(filename + '\x00'), False) self.assertIs(self.pathmodule.exists(bfilename + b'\x00'), False) - if self.pathmodule is not genericpath: - self.assertIs(self.pathmodule.lexists(filename), True) - self.assertIs(self.pathmodule.lexists(bfilename), True) + self.assertIs(self.pathmodule.lexists(filename), True) + self.assertIs(self.pathmodule.lexists(bfilename), True) - self.assertIs(self.pathmodule.lexists(filename + '\udfff'), False) - self.assertIs(self.pathmodule.lexists(bfilename + b'\xff'), False) - self.assertIs(self.pathmodule.lexists(filename + '\x00'), False) - self.assertIs(self.pathmodule.lexists(bfilename + b'\x00'), False) + self.assertIs(self.pathmodule.lexists(filename + '\udfff'), False) + self.assertIs(self.pathmodule.lexists(bfilename + b'\xff'), False) + self.assertIs(self.pathmodule.lexists(filename + '\x00'), False) + self.assertIs(self.pathmodule.lexists(bfilename + b'\x00'), False) @unittest.skipUnless(hasattr(os, "pipe"), "requires os.pipe()") @unittest.skipIf(is_emscripten, "Emscripten pipe fds have no stat") diff --git a/Lib/test/test_http_cookiejar.py b/Lib/test/test_http_cookiejar.py index 97e9c82cde9ec4..dbf9ce10f76f91 100644 --- a/Lib/test/test_http_cookiejar.py +++ b/Lib/test/test_http_cookiejar.py @@ -9,7 +9,6 @@ import time import unittest import urllib.request -import pathlib from http.cookiejar import (time2isoz, http2time, iso2time, time2netscape, parse_ns_headers, join_header_words, split_header_words, Cookie, @@ -337,9 +336,9 @@ def test_constructor_with_str(self): self.assertEqual(c.filename, filename) def test_constructor_with_path_like(self): - filename = pathlib.Path(os_helper.TESTFN) - c = LWPCookieJar(filename) - self.assertEqual(c.filename, os.fspath(filename)) + filename = os_helper.TESTFN + c = LWPCookieJar(os_helper.FakePath(filename)) + self.assertEqual(c.filename, filename) def test_constructor_with_none(self): c = LWPCookieJar(None) diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py index 79bf7dbdbb81a0..b5384b59463742 100644 --- a/Lib/test/test_imaplib.py +++ b/Lib/test/test_imaplib.py @@ -458,18 +458,14 @@ def test_simple_with_statement(self): with self.imap_class(*server.server_address): pass - @requires_resource('walltime') def test_imaplib_timeout_test(self): - _, server = self._setup(SimpleIMAPHandler) - addr = server.server_address[1] - client = self.imap_class("localhost", addr, timeout=None) - self.assertEqual(client.sock.timeout, None) - client.shutdown() - client = self.imap_class("localhost", addr, timeout=support.LOOPBACK_TIMEOUT) - self.assertEqual(client.sock.timeout, support.LOOPBACK_TIMEOUT) - client.shutdown() + _, server = self._setup(SimpleIMAPHandler, connect=False) + with self.imap_class(*server.server_address, timeout=None) as client: + self.assertEqual(client.sock.timeout, None) + with self.imap_class(*server.server_address, 
timeout=support.LOOPBACK_TIMEOUT) as client: + self.assertEqual(client.sock.timeout, support.LOOPBACK_TIMEOUT) with self.assertRaises(ValueError): - client = self.imap_class("localhost", addr, timeout=0) + self.imap_class(*server.server_address, timeout=0) def test_imaplib_timeout_functionality_test(self): class TimeoutHandler(SimpleIMAPHandler): @@ -552,7 +548,6 @@ class NewIMAPSSLTests(NewIMAPTestsMixin, unittest.TestCase): imap_class = IMAP4_SSL server_class = SecureTCPServer - @requires_resource('walltime') def test_ssl_raises(self): ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) self.assertEqual(ssl_context.verify_mode, ssl.CERT_REQUIRED) @@ -566,17 +561,16 @@ def test_ssl_raises(self): CERTIFICATE_VERIFY_FAILED # AWS-LC )""", re.X) with self.assertRaisesRegex(ssl.CertificateError, regex): - _, server = self._setup(SimpleIMAPHandler) + _, server = self._setup(SimpleIMAPHandler, connect=False) client = self.imap_class(*server.server_address, ssl_context=ssl_context) client.shutdown() - @requires_resource('walltime') def test_ssl_verified(self): ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) ssl_context.load_verify_locations(CAFILE) - _, server = self._setup(SimpleIMAPHandler) + _, server = self._setup(SimpleIMAPHandler, connect=False) client = self.imap_class("localhost", server.server_address[1], ssl_context=ssl_context) client.shutdown() diff --git a/Lib/test/test_import/__init__.py b/Lib/test/test_import/__init__.py index 64282d0f2d0bcf..b09065f812c0e4 100644 --- a/Lib/test/test_import/__init__.py +++ b/Lib/test/test_import/__init__.py @@ -2887,6 +2887,16 @@ def test_with_reinit_reloaded(self): self.assertIs(reloaded.snapshot.cached, reloaded.module) + @unittest.skipIf(_testinternalcapi is None, "requires _testinternalcapi") + def test_check_state_first(self): + for variant in ['', '_with_reinit', '_with_state']: + name = f'{self.NAME}{variant}_check_cache_first' + with self.subTest(name): + mod = self._load_dynamic(name, self.ORIGIN) + self.assertEqual(mod.__name__, name) + sys.modules.pop(name, None) + _testinternalcapi.clear_extension(name, self.ORIGIN) + # Currently, for every single-phrase init module loaded # in multiple interpreters, those interpreters share a # PyModuleDef for that object, which can be a problem. 
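The importlib.resources churn that follows (zip fixtures, namespace packages, the new subdirectory entries) is all exercised through the files()/Traversable API. A small usage sketch, assuming the data01 fixture package used by these tests is importable as test.test_importlib.resources.data01:

    from importlib import resources

    files = resources.files('test.test_importlib.resources.data01')
    print(sorted(entry.name for entry in files.iterdir()))
    # joinpath() accepts several segments at once, which is what the new
    # test_joinpath_with_multiple_args test checks.
    print(files.joinpath('subdirectory', 'binary.file').read_bytes())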
diff --git a/Lib/test/test_importlib/resources/data01/subdirectory/binary.file b/Lib/test/test_importlib/resources/data01/subdirectory/binary.file index eaf36c1daccfdf..5bd8bb897b1322 100644 Binary files a/Lib/test/test_importlib/resources/data01/subdirectory/binary.file and b/Lib/test/test_importlib/resources/data01/subdirectory/binary.file differ diff --git a/Lib/test/test_importlib/resources/namespacedata01/subdirectory/binary.file b/Lib/test/test_importlib/resources/namespacedata01/subdirectory/binary.file new file mode 100644 index 00000000000000..100f50643d8d21 --- /dev/null +++ b/Lib/test/test_importlib/resources/namespacedata01/subdirectory/binary.file @@ -0,0 +1 @@ +  \ No newline at end of file diff --git a/Lib/test/test_importlib/resources/test_contents.py b/Lib/test/test_importlib/resources/test_contents.py index 1a13f043a86f03..beab67ccc21680 100644 --- a/Lib/test/test_importlib/resources/test_contents.py +++ b/Lib/test/test_importlib/resources/test_contents.py @@ -31,8 +31,8 @@ class ContentsZipTests(ContentsTests, util.ZipSetup, unittest.TestCase): class ContentsNamespaceTests(ContentsTests, unittest.TestCase): expected = { # no __init__ because of namespace design - # no subdirectory as incidental difference in fixture 'binary.file', + 'subdirectory', 'utf-16.file', 'utf-8.file', } diff --git a/Lib/test/test_importlib/resources/test_custom.py b/Lib/test/test_importlib/resources/test_custom.py index 73127209a2761b..640f90fc0dd91a 100644 --- a/Lib/test/test_importlib/resources/test_custom.py +++ b/Lib/test/test_importlib/resources/test_custom.py @@ -5,6 +5,7 @@ from test.support import os_helper from importlib import resources +from importlib.resources import abc from importlib.resources.abc import TraversableResources, ResourceReader from . import util @@ -39,8 +40,9 @@ def setUp(self): self.addCleanup(self.fixtures.close) def test_custom_loader(self): - temp_dir = self.fixtures.enter_context(os_helper.temp_dir()) + temp_dir = pathlib.Path(self.fixtures.enter_context(os_helper.temp_dir())) loader = SimpleLoader(MagicResources(temp_dir)) pkg = util.create_package_from_loader(loader) files = resources.files(pkg) - assert files is temp_dir + assert isinstance(files, abc.Traversable) + assert list(files.iterdir()) == [] diff --git a/Lib/test/test_importlib/resources/test_files.py b/Lib/test/test_importlib/resources/test_files.py index 26c8b04e44c3b9..7df6d03ead7480 100644 --- a/Lib/test/test_importlib/resources/test_files.py +++ b/Lib/test/test_importlib/resources/test_files.py @@ -1,4 +1,3 @@ -import typing import textwrap import unittest import warnings @@ -32,13 +31,14 @@ def test_read_text(self): actual = files.joinpath('utf-8.file').read_text(encoding='utf-8') assert actual == 'Hello, UTF-8 world!\n' - @unittest.skipUnless( - hasattr(typing, 'runtime_checkable'), - "Only suitable when typing supports runtime_checkable", - ) def test_traversable(self): assert isinstance(resources.files(self.data), Traversable) + def test_joinpath_with_multiple_args(self): + files = resources.files(self.data) + binfile = files.joinpath('subdirectory', 'binary.file') + self.assertTrue(binfile.is_file()) + def test_old_parameter(self): """ Files used to take a 'package' parameter. 
Make sure anyone @@ -64,6 +64,10 @@ def setUp(self): self.data = namespacedata01 +class OpenNamespaceZipTests(FilesTests, util.ZipSetup, unittest.TestCase): + ZIP_MODULE = 'namespacedata01' + + class SiteDir: def setUp(self): self.fixtures = contextlib.ExitStack() diff --git a/Lib/test/test_importlib/resources/test_open.py b/Lib/test/test_importlib/resources/test_open.py index 86becb4bfaad37..3b6b2142ef47b1 100644 --- a/Lib/test/test_importlib/resources/test_open.py +++ b/Lib/test/test_importlib/resources/test_open.py @@ -24,7 +24,7 @@ def test_open_binary(self): target = resources.files(self.data) / 'binary.file' with target.open('rb') as fp: result = fp.read() - self.assertEqual(result, b'\x00\x01\x02\x03') + self.assertEqual(result, bytes(range(4))) def test_open_text_default_encoding(self): target = resources.files(self.data) / 'utf-8.file' @@ -81,5 +81,9 @@ class OpenZipTests(OpenTests, util.ZipSetup, unittest.TestCase): pass +class OpenNamespaceZipTests(OpenTests, util.ZipSetup, unittest.TestCase): + ZIP_MODULE = 'namespacedata01' + + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_importlib/resources/test_path.py b/Lib/test/test_importlib/resources/test_path.py index 34a6bdd2d58b91..90b22905ab8692 100644 --- a/Lib/test/test_importlib/resources/test_path.py +++ b/Lib/test/test_importlib/resources/test_path.py @@ -1,4 +1,5 @@ import io +import pathlib import unittest from importlib import resources @@ -15,18 +16,13 @@ def execute(self, package, path): class PathTests: def test_reading(self): """ - Path should be readable. - - Test also implicitly verifies the returned object is a pathlib.Path - instance. + Path should be readable and a pathlib.Path instance. """ target = resources.files(self.data) / 'utf-8.file' with resources.as_file(target) as path: + self.assertIsInstance(path, pathlib.Path) self.assertTrue(path.name.endswith("utf-8.file"), repr(path)) - # pathlib.Path.read_text() was introduced in Python 3.5. 
- with path.open('r', encoding='utf-8') as file: - text = file.read() - self.assertEqual('Hello, UTF-8 world!\n', text) + self.assertEqual('Hello, UTF-8 world!\n', path.read_text(encoding='utf-8')) class PathDiskTests(PathTests, unittest.TestCase): diff --git a/Lib/test/test_importlib/resources/test_read.py b/Lib/test/test_importlib/resources/test_read.py index 088982681e8b0c..984feecbb9ed69 100644 --- a/Lib/test/test_importlib/resources/test_read.py +++ b/Lib/test/test_importlib/resources/test_read.py @@ -18,7 +18,7 @@ def execute(self, package, path): class ReadTests: def test_read_bytes(self): result = resources.files(self.data).joinpath('binary.file').read_bytes() - self.assertEqual(result, b'\0\1\2\3') + self.assertEqual(result, bytes(range(4))) def test_read_text_default_encoding(self): result = ( @@ -57,17 +57,15 @@ class ReadDiskTests(ReadTests, unittest.TestCase): class ReadZipTests(ReadTests, util.ZipSetup, unittest.TestCase): def test_read_submodule_resource(self): - submodule = import_module('ziptestdata.subdirectory') + submodule = import_module('data01.subdirectory') result = resources.files(submodule).joinpath('binary.file').read_bytes() - self.assertEqual(result, b'\0\1\2\3') + self.assertEqual(result, bytes(range(4, 8))) def test_read_submodule_resource_by_name(self): result = ( - resources.files('ziptestdata.subdirectory') - .joinpath('binary.file') - .read_bytes() + resources.files('data01.subdirectory').joinpath('binary.file').read_bytes() ) - self.assertEqual(result, b'\0\1\2\3') + self.assertEqual(result, bytes(range(4, 8))) class ReadNamespaceTests(ReadTests, unittest.TestCase): @@ -77,5 +75,22 @@ def setUp(self): self.data = namespacedata01 +class ReadNamespaceZipTests(ReadTests, util.ZipSetup, unittest.TestCase): + ZIP_MODULE = 'namespacedata01' + + def test_read_submodule_resource(self): + submodule = import_module('namespacedata01.subdirectory') + result = resources.files(submodule).joinpath('binary.file').read_bytes() + self.assertEqual(result, bytes(range(12, 16))) + + def test_read_submodule_resource_by_name(self): + result = ( + resources.files('namespacedata01.subdirectory') + .joinpath('binary.file') + .read_bytes() + ) + self.assertEqual(result, bytes(range(12, 16))) + + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_importlib/resources/test_reader.py b/Lib/test/test_importlib/resources/test_reader.py index 8670f72a334585..dac9c2a892ffd2 100644 --- a/Lib/test/test_importlib/resources/test_reader.py +++ b/Lib/test/test_importlib/resources/test_reader.py @@ -10,8 +10,7 @@ class MultiplexedPathTest(unittest.TestCase): @classmethod def setUpClass(cls): - path = pathlib.Path(__file__).parent / 'namespacedata01' - cls.folder = str(path) + cls.folder = pathlib.Path(__file__).parent / 'namespacedata01' def test_init_no_paths(self): with self.assertRaises(FileNotFoundError): @@ -19,7 +18,7 @@ def test_init_no_paths(self): def test_init_file(self): with self.assertRaises(NotADirectoryError): - MultiplexedPath(os.path.join(self.folder, 'binary.file')) + MultiplexedPath(self.folder / 'binary.file') def test_iterdir(self): contents = {path.name for path in MultiplexedPath(self.folder).iterdir()} @@ -27,10 +26,12 @@ def test_iterdir(self): contents.remove('__pycache__') except (KeyError, ValueError): pass - self.assertEqual(contents, {'binary.file', 'utf-16.file', 'utf-8.file'}) + self.assertEqual( + contents, {'subdirectory', 'binary.file', 'utf-16.file', 'utf-8.file'} + ) def test_iterdir_duplicate(self): - data01 = 
os.path.abspath(os.path.join(__file__, '..', 'data01')) + data01 = pathlib.Path(__file__).parent.joinpath('data01') contents = { path.name for path in MultiplexedPath(self.folder, data01).iterdir() } @@ -60,17 +61,17 @@ def test_open_file(self): path.open() def test_join_path(self): - prefix = os.path.abspath(os.path.join(__file__, '..')) - data01 = os.path.join(prefix, 'data01') + data01 = pathlib.Path(__file__).parent.joinpath('data01') + prefix = str(data01.parent) path = MultiplexedPath(self.folder, data01) self.assertEqual( str(path.joinpath('binary.file'))[len(prefix) + 1 :], os.path.join('namespacedata01', 'binary.file'), ) - self.assertEqual( - str(path.joinpath('subdirectory'))[len(prefix) + 1 :], - os.path.join('data01', 'subdirectory'), - ) + sub = path.joinpath('subdirectory') + assert isinstance(sub, MultiplexedPath) + assert 'namespacedata01' in str(sub) + assert 'data01' in str(sub) self.assertEqual( str(path.joinpath('imaginary'))[len(prefix) + 1 :], os.path.join('namespacedata01', 'imaginary'), @@ -82,9 +83,9 @@ def test_join_path_compound(self): assert not path.joinpath('imaginary/foo.py').exists() def test_join_path_common_subdir(self): - prefix = os.path.abspath(os.path.join(__file__, '..')) - data01 = os.path.join(prefix, 'data01') - data02 = os.path.join(prefix, 'data02') + data01 = pathlib.Path(__file__).parent.joinpath('data01') + data02 = pathlib.Path(__file__).parent.joinpath('data02') + prefix = str(data01.parent) path = MultiplexedPath(data01, data02) self.assertIsInstance(path.joinpath('subdirectory'), MultiplexedPath) self.assertEqual( diff --git a/Lib/test/test_importlib/resources/test_resource.py b/Lib/test/test_importlib/resources/test_resource.py index 6f75cf57f03d02..d1d45d9b4617f3 100644 --- a/Lib/test/test_importlib/resources/test_resource.py +++ b/Lib/test/test_importlib/resources/test_resource.py @@ -1,15 +1,10 @@ -import contextlib import sys import unittest -import uuid import pathlib from . import data01 -from . import zipdata01, zipdata02 from . 
import util from importlib import resources, import_module -from test.support import import_helper, os_helper -from test.support.os_helper import unlink class ResourceTests: @@ -89,34 +84,32 @@ def test_package_has_no_reader_fallback(self): class ResourceFromZipsTest01(util.ZipSetupBase, unittest.TestCase): - ZIP_MODULE = zipdata01 # type: ignore + ZIP_MODULE = 'data01' def test_is_submodule_resource(self): - submodule = import_module('ziptestdata.subdirectory') + submodule = import_module('data01.subdirectory') self.assertTrue(resources.files(submodule).joinpath('binary.file').is_file()) def test_read_submodule_resource_by_name(self): self.assertTrue( - resources.files('ziptestdata.subdirectory') - .joinpath('binary.file') - .is_file() + resources.files('data01.subdirectory').joinpath('binary.file').is_file() ) def test_submodule_contents(self): - submodule = import_module('ziptestdata.subdirectory') + submodule = import_module('data01.subdirectory') self.assertEqual( names(resources.files(submodule)), {'__init__.py', 'binary.file'} ) def test_submodule_contents_by_name(self): self.assertEqual( - names(resources.files('ziptestdata.subdirectory')), + names(resources.files('data01.subdirectory')), {'__init__.py', 'binary.file'}, ) def test_as_file_directory(self): - with resources.as_file(resources.files('ziptestdata')) as data: - assert data.name == 'ziptestdata' + with resources.as_file(resources.files('data01')) as data: + assert data.name == 'data01' assert data.is_dir() assert data.joinpath('subdirectory').is_dir() assert len(list(data.iterdir())) @@ -124,7 +117,7 @@ def test_as_file_directory(self): class ResourceFromZipsTest02(util.ZipSetupBase, unittest.TestCase): - ZIP_MODULE = zipdata02 # type: ignore + ZIP_MODULE = 'data02' def test_unrelated_contents(self): """ @@ -132,93 +125,48 @@ def test_unrelated_contents(self): distinct resources. Ref python/importlib_resources#44. """ self.assertEqual( - names(resources.files('ziptestdata.one')), + names(resources.files('data02.one')), {'__init__.py', 'resource1.txt'}, ) self.assertEqual( - names(resources.files('ziptestdata.two')), + names(resources.files('data02.two')), {'__init__.py', 'resource2.txt'}, ) -@contextlib.contextmanager -def zip_on_path(dir): - data_path = pathlib.Path(zipdata01.__file__) - source_zip_path = data_path.parent.joinpath('ziptestdata.zip') - zip_path = pathlib.Path(dir) / f'{uuid.uuid4()}.zip' - zip_path.write_bytes(source_zip_path.read_bytes()) - sys.path.append(str(zip_path)) - import_module('ziptestdata') - - try: - yield - finally: - with contextlib.suppress(ValueError): - sys.path.remove(str(zip_path)) - - with contextlib.suppress(KeyError): - del sys.path_importer_cache[str(zip_path)] - del sys.modules['ziptestdata'] - - with contextlib.suppress(OSError): - unlink(zip_path) - - -class DeletingZipsTest(unittest.TestCase): +class DeletingZipsTest(util.ZipSetupBase, unittest.TestCase): """Having accessed resources in a zip file should not keep an open reference to the zip. 
""" - def setUp(self): - self.fixtures = contextlib.ExitStack() - self.addCleanup(self.fixtures.close) - - modules = import_helper.modules_setup() - self.addCleanup(import_helper.modules_cleanup, *modules) - - temp_dir = self.fixtures.enter_context(os_helper.temp_dir()) - self.fixtures.enter_context(zip_on_path(temp_dir)) - def test_iterdir_does_not_keep_open(self): - [item.name for item in resources.files('ziptestdata').iterdir()] + [item.name for item in resources.files('data01').iterdir()] def test_is_file_does_not_keep_open(self): - resources.files('ziptestdata').joinpath('binary.file').is_file() + resources.files('data01').joinpath('binary.file').is_file() def test_is_file_failure_does_not_keep_open(self): - resources.files('ziptestdata').joinpath('not-present').is_file() + resources.files('data01').joinpath('not-present').is_file() @unittest.skip("Desired but not supported.") def test_as_file_does_not_keep_open(self): # pragma: no cover - resources.as_file(resources.files('ziptestdata') / 'binary.file') + resources.as_file(resources.files('data01') / 'binary.file') def test_entered_path_does_not_keep_open(self): """ Mimic what certifi does on import to make its bundle available for the process duration. """ - resources.as_file(resources.files('ziptestdata') / 'binary.file').__enter__() + resources.as_file(resources.files('data01') / 'binary.file').__enter__() def test_read_binary_does_not_keep_open(self): - resources.files('ziptestdata').joinpath('binary.file').read_bytes() + resources.files('data01').joinpath('binary.file').read_bytes() def test_read_text_does_not_keep_open(self): - resources.files('ziptestdata').joinpath('utf-8.file').read_text( - encoding='utf-8' - ) + resources.files('data01').joinpath('utf-8.file').read_text(encoding='utf-8') -class ResourceFromNamespaceTest01(unittest.TestCase): - site_dir = str(pathlib.Path(__file__).parent) - - @classmethod - def setUpClass(cls): - sys.path.append(cls.site_dir) - - @classmethod - def tearDownClass(cls): - sys.path.remove(cls.site_dir) - +class ResourceFromNamespaceTests: def test_is_submodule_resource(self): self.assertTrue( resources.files(import_module('namespacedata01')) @@ -237,7 +185,9 @@ def test_submodule_contents(self): contents.remove('__pycache__') except KeyError: pass - self.assertEqual(contents, {'binary.file', 'utf-8.file', 'utf-16.file'}) + self.assertEqual( + contents, {'subdirectory', 'binary.file', 'utf-8.file', 'utf-16.file'} + ) def test_submodule_contents_by_name(self): contents = names(resources.files('namespacedata01')) @@ -245,7 +195,45 @@ def test_submodule_contents_by_name(self): contents.remove('__pycache__') except KeyError: pass - self.assertEqual(contents, {'binary.file', 'utf-8.file', 'utf-16.file'}) + self.assertEqual( + contents, {'subdirectory', 'binary.file', 'utf-8.file', 'utf-16.file'} + ) + + def test_submodule_sub_contents(self): + contents = names(resources.files(import_module('namespacedata01.subdirectory'))) + try: + contents.remove('__pycache__') + except KeyError: + pass + self.assertEqual(contents, {'binary.file'}) + + def test_submodule_sub_contents_by_name(self): + contents = names(resources.files('namespacedata01.subdirectory')) + try: + contents.remove('__pycache__') + except KeyError: + pass + self.assertEqual(contents, {'binary.file'}) + + +class ResourceFromNamespaceDiskTests(ResourceFromNamespaceTests, unittest.TestCase): + site_dir = str(pathlib.Path(__file__).parent) + + @classmethod + def setUpClass(cls): + sys.path.append(cls.site_dir) + + @classmethod + def 
tearDownClass(cls): + sys.path.remove(cls.site_dir) + + +class ResourceFromNamespaceZipTests( + util.ZipSetupBase, + ResourceFromNamespaceTests, + unittest.TestCase, +): + ZIP_MODULE = 'namespacedata01' if __name__ == '__main__': diff --git a/Lib/test/test_importlib/resources/util.py b/Lib/test/test_importlib/resources/util.py index dbe6ee81476699..d4bf3e6cc5dfdc 100644 --- a/Lib/test/test_importlib/resources/util.py +++ b/Lib/test/test_importlib/resources/util.py @@ -4,11 +4,12 @@ import sys import types import pathlib +import contextlib from . import data01 -from . import zipdata01 from importlib.resources.abc import ResourceReader -from test.support import import_helper +from test.support import import_helper, os_helper +from . import zip as zip_ from importlib.machinery import ModuleSpec @@ -141,39 +142,23 @@ def test_useless_loader(self): class ZipSetupBase: - ZIP_MODULE = None - - @classmethod - def setUpClass(cls): - data_path = pathlib.Path(cls.ZIP_MODULE.__file__) - data_dir = data_path.parent - cls._zip_path = str(data_dir / 'ziptestdata.zip') - sys.path.append(cls._zip_path) - cls.data = importlib.import_module('ziptestdata') - - @classmethod - def tearDownClass(cls): - try: - sys.path.remove(cls._zip_path) - except ValueError: - pass - - try: - del sys.path_importer_cache[cls._zip_path] - del sys.modules[cls.data.__name__] - except KeyError: - pass - - try: - del cls.data - del cls._zip_path - except AttributeError: - pass + ZIP_MODULE = 'data01' def setUp(self): - modules = import_helper.modules_setup() - self.addCleanup(import_helper.modules_cleanup, *modules) + self.fixtures = contextlib.ExitStack() + self.addCleanup(self.fixtures.close) + + self.fixtures.enter_context(import_helper.isolated_modules()) + + temp_dir = self.fixtures.enter_context(os_helper.temp_dir()) + modules = pathlib.Path(temp_dir) / 'zipped modules.zip' + src_path = pathlib.Path(__file__).parent.joinpath(self.ZIP_MODULE) + self.fixtures.enter_context( + import_helper.DirsOnSysPath(str(zip_.make_zip_file(src_path, modules))) + ) + + self.data = importlib.import_module(self.ZIP_MODULE) class ZipSetup(ZipSetupBase): - ZIP_MODULE = zipdata01 # type: ignore + pass diff --git a/Lib/test/test_importlib/resources/zip.py b/Lib/test/test_importlib/resources/zip.py new file mode 100755 index 00000000000000..4dcf6facc770cb --- /dev/null +++ b/Lib/test/test_importlib/resources/zip.py @@ -0,0 +1,30 @@ +""" +Generate zip test data files. +""" + +import contextlib +import os +import pathlib +import zipfile + + +def make_zip_file(src, dst): + """ + Zip the files in src into a new zipfile at dst. 
+ """ + with zipfile.ZipFile(dst, 'w') as zf: + for src_path, rel in walk(src): + dst_name = src.name / pathlib.PurePosixPath(rel.as_posix()) + zf.write(src_path, dst_name) + zipfile._path.CompleteDirs.inject(zf) + return dst + + +def walk(datapath): + for dirpath, dirnames, filenames in os.walk(datapath): + with contextlib.suppress(ValueError): + dirnames.remove('__pycache__') + for filename in filenames: + res = pathlib.Path(dirpath) / filename + rel = res.relative_to(datapath) + yield res, rel diff --git a/Lib/test/test_importlib/test_threaded_import.py b/Lib/test/test_importlib/test_threaded_import.py index 3477112927a0b6..9af1e4d505c66e 100644 --- a/Lib/test/test_importlib/test_threaded_import.py +++ b/Lib/test/test_importlib/test_threaded_import.py @@ -17,7 +17,7 @@ from test.support import verbose from test.support.import_helper import forget, mock_register_at_fork from test.support.os_helper import (TESTFN, unlink, rmtree) -from test.support import script_helper, threading_helper, requires_gil_enabled +from test.support import script_helper, threading_helper threading_helper.requires_working_threading(module=True) @@ -248,9 +248,6 @@ def test_concurrent_futures_circular_import(self): 'partial', 'cfimport.py') script_helper.assert_python_ok(fn) - # gh-118727 and gh-118729: pool_in_threads.py may crash in free-threaded - # builds, which can hang the Tsan test so temporarily skip it for now. - @requires_gil_enabled("gh-118727: test may crash in free-threaded builds") def test_multiprocessing_pool_circular_import(self): # Regression test for bpo-41567 fn = os.path.join(os.path.dirname(__file__), diff --git a/Lib/test/test_inspect/test_inspect.py b/Lib/test/test_inspect/test_inspect.py index 82e466e978624f..011d42f34b6461 100644 --- a/Lib/test/test_inspect/test_inspect.py +++ b/Lib/test/test_inspect/test_inspect.py @@ -34,7 +34,7 @@ except ImportError: ThreadPoolExecutor = None -from test.support import cpython_only, import_helper +from test.support import cpython_only, import_helper, suppress_immortalization from test.support import MISSING_C_DOCSTRINGS, ALWAYS_EQ from test.support.import_helper import DirsOnSysPath, ready_to_import from test.support.os_helper import TESTFN, temp_cwd @@ -768,6 +768,7 @@ def test_getfile_builtin_function_or_method(self): inspect.getfile(list.append) self.assertIn('expected, got', str(e_append.exception)) + @suppress_immortalization() def test_getfile_class_without_module(self): class CM(type): @property @@ -2430,6 +2431,7 @@ def __getattribute__(self, attr): self.assertFalse(test.called) + @suppress_immortalization() def test_cache_does_not_cause_classes_to_persist(self): # regression test for gh-118013: # check that the internal _shadowed_dict cache does not cause @@ -5087,15 +5089,30 @@ def test(a_po, b_po, c_po=3, /, foo=42, *, bar=50, **kwargs): self.assertEqual(self.call(test, 1, 2, foo=4, bar=5), (1, 2, 3, 4, 5, {})) - with self.assertRaisesRegex(TypeError, "but was passed as a keyword"): - self.call(test, 1, 2, foo=4, bar=5, c_po=10) + self.assertEqual(self.call(test, 1, 2, foo=4, bar=5, c_po=10), + (1, 2, 3, 4, 5, {'c_po': 10})) - with self.assertRaisesRegex(TypeError, "parameter is positional only"): - self.call(test, 1, 2, c_po=4) + self.assertEqual(self.call(test, 1, 2, 30, c_po=31, foo=4, bar=5), + (1, 2, 30, 4, 5, {'c_po': 31})) - with self.assertRaisesRegex(TypeError, "parameter is positional only"): + self.assertEqual(self.call(test, 1, 2, 30, foo=4, bar=5, c_po=31), + (1, 2, 30, 4, 5, {'c_po': 31})) + + self.assertEqual(self.call(test, 
1, 2, c_po=4), + (1, 2, 3, 42, 50, {'c_po': 4})) + + with self.assertRaisesRegex(TypeError, "missing 2 required positional arguments"): self.call(test, a_po=1, b_po=2) + def without_var_kwargs(c_po=3, d_po=4, /): + return c_po, d_po + + with self.assertRaisesRegex( + TypeError, + "positional-only arguments passed as keyword arguments: 'c_po, d_po'", + ): + self.call(without_var_kwargs, c_po=33, d_po=44) + def test_signature_bind_with_self_arg(self): # Issue #17071: one of the parameters is named "self def test(a, self, b): diff --git a/Lib/test/test_itertools.py b/Lib/test/test_itertools.py index e243da309f03d8..2c92d880c10cb3 100644 --- a/Lib/test/test_itertools.py +++ b/Lib/test/test_itertools.py @@ -644,7 +644,7 @@ def test_count(self): count(1, maxsize+5); sys.exc_info() @pickle_deprecated - def test_count_with_stride(self): + def test_count_with_step(self): self.assertEqual(lzip('abc',count(2,3)), [('a', 2), ('b', 5), ('c', 8)]) self.assertEqual(lzip('abc',count(start=2,step=3)), [('a', 2), ('b', 5), ('c', 8)]) @@ -699,6 +699,28 @@ def test_count_with_stride(self): for proto in range(pickle.HIGHEST_PROTOCOL + 1): self.pickletest(proto, count(i, j)) + @threading_helper.requires_working_threading() + def test_count_threading(self, step=1): + # this test verifies multithreading consistency, which is + # mostly for testing builds without GIL, but nice to test anyway + count_to = 10_000 + num_threads = 10 + c = count(step=step) + def counting_thread(): + for i in range(count_to): + next(c) + threads = [] + for i in range(num_threads): + thread = threading.Thread(target=counting_thread) + thread.start() + threads.append(thread) + for thread in threads: + thread.join() + self.assertEqual(next(c), count_to * num_threads * step) + + def test_count_with_step_threading(self): + self.test_count_threading(step=5) + def test_cycle(self): self.assertEqual(take(10, cycle('abc')), list('abcabcabca')) self.assertEqual(list(cycle('')), []) diff --git a/Lib/test/test_launcher.py b/Lib/test/test_launcher.py index 2528a51240fbf7..58baae25df3df7 100644 --- a/Lib/test/test_launcher.py +++ b/Lib/test/test_launcher.py @@ -764,3 +764,11 @@ def test_shebang_command_in_venv(self): with self.script(f'#! /usr/bin/env {exe.stem} arg1') as script: data = self.run_py([script], env=env) self.assertEqual(data["stdout"].strip(), f"{quote(exe)} arg1 {quote(script)}") + + def test_shebang_executable_extension(self): + with self.script('#! /usr/bin/env python3.99') as script: + data = self.run_py([script], expect_returncode=103) + expect = "# Search PATH for python3.99.exe" + actual = [line.strip() for line in data["stderr"].splitlines() + if line.startswith("# Search PATH")] + self.assertEqual([expect], actual) diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py index e651d96f100a83..ef2d4a621be962 100644 --- a/Lib/test/test_logging.py +++ b/Lib/test/test_logging.py @@ -657,15 +657,15 @@ def test_builtin_handlers(self): self.assertFalse(h.shouldFlush(r)) h.close() - def test_path_objects(self): + def test_pathlike_objects(self): """ - Test that Path objects are accepted as filename arguments to handlers. + Test that path-like objects are accepted as filename arguments to handlers. See Issue #27493. 
""" fn = make_temp_file() os.unlink(fn) - pfn = pathlib.Path(fn) + pfn = os_helper.FakePath(fn) cases = ( (logging.FileHandler, (pfn, 'w')), (logging.handlers.RotatingFileHandler, (pfn, 'a')), @@ -3926,6 +3926,36 @@ def test_config_queue_handler(self): msg = str(ctx.exception) self.assertEqual(msg, "Unable to configure handler 'ah'") + def test_multiprocessing_queues(self): + # See gh-119819 + + # will skip test if it's not available + import_helper.import_module('_multiprocessing') + + cd = copy.deepcopy(self.config_queue_handler) + from multiprocessing import Queue as MQ, Manager as MM + q1 = MQ() # this can't be pickled + q2 = MM().Queue() # a proxy queue for use when pickling is needed + q3 = MM().JoinableQueue() # a joinable proxy queue + for qspec in (q1, q2, q3): + fn = make_temp_file('.log', 'test_logging-cmpqh-') + cd['handlers']['h1']['filename'] = fn + cd['handlers']['ah']['queue'] = qspec + qh = None + try: + self.apply_config(cd) + qh = logging.getHandlerByName('ah') + self.assertEqual(sorted(logging.getHandlerNames()), ['ah', 'h1']) + self.assertIsNotNone(qh.listener) + self.assertIs(qh.queue, qspec) + self.assertIs(qh.listener.queue, qspec) + finally: + h = logging.getHandlerByName('h1') + if h: + self.addCleanup(closeFileHandler, h, fn) + else: + self.addCleanup(os.remove, fn) + def test_90195(self): # See gh-90195 config = { @@ -3976,6 +4006,35 @@ def test_111615(self): } logging.config.dictConfig(config) + # gh-118868: check if kwargs are passed to logging QueueHandler + def test_kwargs_passing(self): + class CustomQueueHandler(logging.handlers.QueueHandler): + def __init__(self, *args, **kwargs): + super().__init__(queue.Queue()) + self.custom_kwargs = kwargs + + custom_kwargs = {'foo': 'bar'} + + config = { + 'version': 1, + 'handlers': { + 'custom': { + 'class': CustomQueueHandler, + **custom_kwargs + }, + }, + 'root': { + 'level': 'DEBUG', + 'handlers': ['custom'] + } + } + + logging.config.dictConfig(config) + + handler = logging.getHandlerByName('custom') + self.assertEqual(handler.custom_kwargs, custom_kwargs) + + class ManagerTest(BaseTest): def test_manager_loggerclass(self): logged = [] diff --git a/Lib/test/test_long.py b/Lib/test/test_long.py index d299c34cec076d..41b973da2c7df0 100644 --- a/Lib/test/test_long.py +++ b/Lib/test/test_long.py @@ -1639,6 +1639,8 @@ class MyInt(int): MyInt.__basicsize__ + MyInt.__itemsize__ * ndigits ) + # GH-117195 -- This shouldn't crash + object.__sizeof__(1) if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_mimetypes.py b/Lib/test/test_mimetypes.py index 2e0ad0606ae9c2..58f6a4dfae08ba 100644 --- a/Lib/test/test_mimetypes.py +++ b/Lib/test/test_mimetypes.py @@ -1,7 +1,6 @@ import io import mimetypes import os -import pathlib import sys import unittest.mock @@ -83,11 +82,19 @@ def test_read_mime_types(self): with os_helper.temp_dir() as directory: data = "x-application/x-unittest pyunit\n" - file = pathlib.Path(directory, "sample.mimetype") - file.write_text(data, encoding="utf-8") + file = os.path.join(directory, "sample.mimetype") + with open(file, 'w', encoding="utf-8") as f: + f.write(data) mime_dict = mimetypes.read_mime_types(file) eq(mime_dict[".pyunit"], "x-application/x-unittest") + data = "x-application/x-unittest2 pyunit2\n" + file = os.path.join(directory, "sample2.mimetype") + with open(file, 'w', encoding="utf-8") as f: + f.write(data) + mime_dict = mimetypes.read_mime_types(os_helper.FakePath(file)) + eq(mime_dict[".pyunit2"], "x-application/x-unittest2") + # bpo-41048: read_mime_types should 
read the rule file with 'utf-8' encoding. # Not with locale encoding. _bootlocale has been imported because io.open(...) # uses it. diff --git a/Lib/test/test_mmap.py b/Lib/test/test_mmap.py index ee86227e026b67..a1cf5384ada5b5 100644 --- a/Lib/test/test_mmap.py +++ b/Lib/test/test_mmap.py @@ -3,6 +3,7 @@ ) from test.support.import_helper import import_module from test.support.os_helper import TESTFN, unlink +from test.support.script_helper import assert_python_ok import unittest import errno import os @@ -12,6 +13,7 @@ import socket import string import sys +import textwrap import weakref # Skip test if we can't import mmap. @@ -1058,6 +1060,81 @@ def __exit__(self, exc_type, exc_value, traceback): with self.assertRaisesRegex(ValueError, "mmap closed or invalid"): m.write_byte(X()) + @unittest.skipUnless(os.name == 'nt', 'requires Windows') + @unittest.skipUnless(hasattr(mmap.mmap, '_protect'), 'test needs debug build') + def test_access_violations(self): + from test.support.os_helper import TESTFN + + code = textwrap.dedent(""" + import faulthandler + import mmap + import os + import sys + from contextlib import suppress + + # Prevent logging access violations to stderr. + faulthandler.disable() + + PAGESIZE = mmap.PAGESIZE + PAGE_NOACCESS = 0x01 + + with open(sys.argv[1], 'bw+') as f: + f.write(b'A'* PAGESIZE) + f.flush() + + m = mmap.mmap(f.fileno(), PAGESIZE) + m._protect(PAGE_NOACCESS, 0, PAGESIZE) + with suppress(OSError): + m.read(PAGESIZE) + assert False, 'mmap.read() did not raise' + with suppress(OSError): + m.read_byte() + assert False, 'mmap.read_byte() did not raise' + with suppress(OSError): + m.readline() + assert False, 'mmap.readline() did not raise' + with suppress(OSError): + m.write(b'A'* PAGESIZE) + assert False, 'mmap.write() did not raise' + with suppress(OSError): + m.write_byte(0) + assert False, 'mmap.write_byte() did not raise' + with suppress(OSError): + m[0] # test mmap_subscript + assert False, 'mmap.__getitem__() did not raise' + with suppress(OSError): + m[0:10] # test mmap_subscript + assert False, 'mmap.__getitem__() did not raise' + with suppress(OSError): + m[0:10:2] # test mmap_subscript + assert False, 'mmap.__getitem__() did not raise' + with suppress(OSError): + m[0] = 1 + assert False, 'mmap.__setitem__() did not raise' + with suppress(OSError): + m[0:10] = b'A'* 10 + assert False, 'mmap.__setitem__() did not raise' + with suppress(OSError): + m[0:10:2] = b'A'* 5 + assert False, 'mmap.__setitem__() did not raise' + with suppress(OSError): + m.move(0, 10, 1) + assert False, 'mmap.move() did not raise' + with suppress(OSError): + list(m) # test mmap_item + assert False, 'mmap.__getitem__() did not raise' + with suppress(OSError): + m.find(b'A') + assert False, 'mmap.find() did not raise' + with suppress(OSError): + m.rfind(b'A') + assert False, 'mmap.rfind() did not raise' + """) + rt, stdout, stderr = assert_python_ok("-c", code, TESTFN) + self.assertEqual(stdout.strip(), b'') + self.assertEqual(stderr.strip(), b'') + + class LargeMmapTests(unittest.TestCase): def setUp(self): diff --git a/Lib/test/test_module/__init__.py b/Lib/test/test_module/__init__.py index 98d1cbe824df12..952ba43f72504d 100644 --- a/Lib/test/test_module/__init__.py +++ b/Lib/test/test_module/__init__.py @@ -4,6 +4,7 @@ import weakref from test.support import gc_collect from test.support import import_helper +from test.support import suppress_immortalization from test.support.script_helper import assert_python_ok import sys @@ -103,6 +104,7 @@ def f(): gc_collect() 
self.assertEqual(f().__dict__["bar"], 4) + @suppress_immortalization() def test_clear_dict_in_ref_cycle(self): destroyed = [] m = ModuleType("foo") @@ -118,6 +120,7 @@ def __del__(self): gc_collect() self.assertEqual(destroyed, [1]) + @suppress_immortalization() def test_weakref(self): m = ModuleType("foo") wr = weakref.ref(m) diff --git a/Lib/test/test_monitoring.py b/Lib/test/test_monitoring.py index 6974bc5517ae5f..b7c6abed1016dc 100644 --- a/Lib/test/test_monitoring.py +++ b/Lib/test/test_monitoring.py @@ -1938,19 +1938,28 @@ def setUp(self): ( 1, E.RAISE, capi.fire_event_raise, ValueError(2)), ( 1, E.EXCEPTION_HANDLED, capi.fire_event_exception_handled, ValueError(5)), ( 1, E.PY_UNWIND, capi.fire_event_py_unwind, ValueError(6)), - ( 1, E.STOP_ITERATION, capi.fire_event_stop_iteration, ValueError(7)), + ( 1, E.STOP_ITERATION, capi.fire_event_stop_iteration, 7), + ( 1, E.STOP_ITERATION, capi.fire_event_stop_iteration, StopIteration(8)), ] + self.EXPECT_RAISED_EXCEPTION = [E.PY_THROW, E.RAISE, E.EXCEPTION_HANDLED, E.PY_UNWIND] - def check_event_count(self, event, func, args, expected): + + def check_event_count(self, event, func, args, expected, callback_raises=None): class Counter: - def __init__(self): + def __init__(self, callback_raises): + self.callback_raises = callback_raises self.count = 0 + def __call__(self, *args): self.count += 1 + if self.callback_raises: + exc = self.callback_raises + self.callback_raises = None + raise exc try: - counter = Counter() + counter = Counter(callback_raises) sys.monitoring.register_callback(TEST_TOOL, event, counter) if event == E.C_RETURN or event == E.C_RAISE: sys.monitoring.set_events(TEST_TOOL, E.CALL) @@ -1987,8 +1996,9 @@ def test_fire_event(self): def test_missing_exception(self): for _, event, function, *args in self.cases: - if not (args and isinstance(args[-1], BaseException)): + if event not in self.EXPECT_RAISED_EXCEPTION: continue + assert args and isinstance(args[-1], BaseException) offset = 0 self.codelike = _testcapi.CodeLike(1) with self.subTest(function.__name__): @@ -1997,6 +2007,17 @@ def test_missing_exception(self): expected = ValueError(f"Firing event {evt} with no exception set") self.check_event_count(event, function, args_, expected) + def test_fire_event_failing_callback(self): + for expected, event, function, *args in self.cases: + offset = 0 + self.codelike = _testcapi.CodeLike(1) + with self.subTest(function.__name__): + args_ = (self.codelike, offset) + tuple(args) + exc = OSError(42) + with self.assertRaises(type(exc)): + self.check_event_count(event, function, args_, expected, + callback_raises=exc) + CANNOT_DISABLE = { E.PY_THROW, E.RAISE, E.RERAISE, E.EXCEPTION_HANDLED, E.PY_UNWIND } diff --git a/Lib/test/test_ntpath.py b/Lib/test/test_ntpath.py index 7f91bf1c2b837a..64cbfaaaaa0690 100644 --- a/Lib/test/test_ntpath.py +++ b/Lib/test/test_ntpath.py @@ -1095,6 +1095,27 @@ def test_isfile_driveletter(self): raise unittest.SkipTest('SystemDrive is not defined or malformed') self.assertFalse(os.path.isfile('\\\\.\\' + drive)) + @unittest.skipUnless(hasattr(os, 'pipe'), "need os.pipe()") + def test_isfile_anonymous_pipe(self): + pr, pw = os.pipe() + try: + self.assertFalse(ntpath.isfile(pr)) + finally: + os.close(pr) + os.close(pw) + + @unittest.skipIf(sys.platform != 'win32', "windows only") + def test_isfile_named_pipe(self): + import _winapi + named_pipe = f'//./PIPE/python_isfile_test_{os.getpid()}' + h = _winapi.CreateNamedPipe(named_pipe, + _winapi.PIPE_ACCESS_INBOUND, + 0, 1, 0, 0, 0, 0) + try: + 
self.assertFalse(ntpath.isfile(named_pipe)) + finally: + _winapi.CloseHandle(h) + @unittest.skipIf(sys.platform != 'win32', "windows only") def test_con_device(self): self.assertFalse(os.path.isfile(r"\\.\CON")) @@ -1108,14 +1129,22 @@ def test_fast_paths_in_use(self): # There are fast paths of these functions implemented in posixmodule.c. # Confirm that they are being used, and not the Python fallbacks in # genericpath.py. + self.assertTrue(os.path.splitroot is nt._path_splitroot_ex) + self.assertFalse(inspect.isfunction(os.path.splitroot)) + self.assertTrue(os.path.normpath is nt._path_normpath) + self.assertFalse(inspect.isfunction(os.path.normpath)) self.assertTrue(os.path.isdir is nt._path_isdir) self.assertFalse(inspect.isfunction(os.path.isdir)) self.assertTrue(os.path.isfile is nt._path_isfile) self.assertFalse(inspect.isfunction(os.path.isfile)) self.assertTrue(os.path.islink is nt._path_islink) self.assertFalse(inspect.isfunction(os.path.islink)) + self.assertTrue(os.path.isjunction is nt._path_isjunction) + self.assertFalse(inspect.isfunction(os.path.isjunction)) self.assertTrue(os.path.exists is nt._path_exists) self.assertFalse(inspect.isfunction(os.path.exists)) + self.assertTrue(os.path.lexists is nt._path_lexists) + self.assertFalse(inspect.isfunction(os.path.lexists)) @unittest.skipIf(os.name != 'nt', "Dev Drives only exist on Win32") def test_isdevdrive(self): diff --git a/Lib/test/test_ordered_dict.py b/Lib/test/test_ordered_dict.py index 4571b23dfe7c1a..06a0e81227188c 100644 --- a/Lib/test/test_ordered_dict.py +++ b/Lib/test/test_ordered_dict.py @@ -10,7 +10,7 @@ import weakref from collections.abc import MutableMapping from test import mapping_tests, support -from test.support import import_helper +from test.support import import_helper, suppress_immortalization py_coll = import_helper.import_fresh_module('collections', @@ -667,6 +667,7 @@ def test_dict_update(self): dict.update(od, [('spam', 1)]) self.assertNotIn('NULL', repr(od)) + @suppress_immortalization() def test_reference_loop(self): # Issue 25935 OrderedDict = self.OrderedDict diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py index 9c9c8536dc7542..de5a86f676c4d5 100644 --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -1685,10 +1685,29 @@ def test_fd_leak(self): self.addCleanup(os.close, newfd) self.assertEqual(newfd, minfd) + @unittest.skipIf( + support.is_emscripten, "Cannot dup stdout on Emscripten" + ) + @unittest.skipIf( + support.is_android, "dup return value is unpredictable on Android" + ) + def test_fd_finalization(self): + # Check that close()ing the fwalk() generator closes FDs + def getfd(): + fd = os.dup(1) + os.close(fd) + return fd + for topdown in (False, True): + old_fd = getfd() + it = self.fwalk(os_helper.TESTFN, topdown=topdown) + self.assertEqual(getfd(), old_fd) + next(it) + self.assertGreater(getfd(), old_fd) + it.close() + self.assertEqual(getfd(), old_fd) + # fwalk() keeps file descriptors open test_walk_many_open_files = None - # fwalk() still uses recursion - test_walk_above_recursion_limit = None class BytesWalkTests(WalkTests): @@ -1814,21 +1833,14 @@ def test_exist_ok_existing_regular_file(self): @unittest.skipUnless(os.name == 'nt', "requires Windows") def test_win32_mkdir_700(self): base = os_helper.TESTFN - path1 = os.path.join(os_helper.TESTFN, 'dir1') - path2 = os.path.join(os_helper.TESTFN, 'dir2') - # mode=0o700 is special-cased to override ACLs on Windows - # There's no way to know exactly how the ACLs will look, so we'll - # check that they are different from a 
regularly created directory. - os.mkdir(path1, mode=0o700) - os.mkdir(path2, mode=0o777) - - out1 = subprocess.check_output(["icacls.exe", path1], encoding="oem") - out2 = subprocess.check_output(["icacls.exe", path2], encoding="oem") - os.rmdir(path1) - os.rmdir(path2) - out1 = out1.replace(path1, "") - out2 = out2.replace(path2, "") - self.assertNotEqual(out1, out2) + path = os.path.abspath(os.path.join(os_helper.TESTFN, 'dir')) + os.mkdir(path, mode=0o700) + out = subprocess.check_output(["cacls.exe", path, "/s"], encoding="oem") + os.rmdir(path) + self.assertEqual( + out.strip(), + f'{path} "D:P(A;OICI;FA;;;SY)(A;OICI;FA;;;BA)(A;OICI;FA;;;OW)"', + ) def tearDown(self): path = os.path.join(os_helper.TESTFN, 'dir1', 'dir2', 'dir3', @@ -3226,9 +3238,8 @@ def test_stat_inaccessible_file(self): self.skipTest("Unable to create inaccessible file") def cleanup(): - # Give delete permission. We are the file owner, so we can do this - # even though we removed all permissions earlier. - subprocess.check_output([ICACLS, filename, "/grant", "Everyone:(D)"], + # Give delete permission to the owner (us) + subprocess.check_output([ICACLS, filename, "/grant", "*WD:(D)"], stderr=subprocess.STDOUT) os.unlink(filename) diff --git a/Lib/test/test_pathlib/test_pathlib_abc.py b/Lib/test/test_pathlib/test_pathlib_abc.py index aadecbc142cca6..d9e51c0e3d6411 100644 --- a/Lib/test/test_pathlib/test_pathlib_abc.py +++ b/Lib/test/test_pathlib/test_pathlib_abc.py @@ -999,6 +999,7 @@ def test_with_suffix_windows(self): self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c\\d') self.assertRaises(ValueError, P('c:a/b').with_suffix, '.c/d') self.assertRaises(ValueError, P('c:a/b').with_suffix, '.c\\d') + self.assertRaises(TypeError, P('c:a/b').with_suffix, None) def test_with_suffix_empty(self): P = self.cls @@ -1006,7 +1007,7 @@ def test_with_suffix_empty(self): self.assertRaises(ValueError, P('').with_suffix, '.gz') self.assertRaises(ValueError, P('/').with_suffix, '.gz') - def test_with_suffix_seps(self): + def test_with_suffix_invalid(self): P = self.cls # Invalid suffix. self.assertRaises(ValueError, P('a/b').with_suffix, 'gz') @@ -1017,6 +1018,7 @@ def test_with_suffix_seps(self): self.assertRaises(ValueError, P('a/b').with_suffix, '.c/.d') self.assertRaises(ValueError, P('a/b').with_suffix, './.d') self.assertRaises(ValueError, P('a/b').with_suffix, '.d/.') + self.assertRaises(TypeError, P('a/b').with_suffix, None) def test_relative_to_common(self): P = self.cls diff --git a/Lib/test/test_pdb.py b/Lib/test/test_pdb.py index f47466410082ef..1b329b205d2d0f 100644 --- a/Lib/test/test_pdb.py +++ b/Lib/test/test_pdb.py @@ -1453,6 +1453,58 @@ def test_post_mortem(): """ +def test_pdb_return_to_different_file(): + """When pdb returns to a different file, it should not skip if f_trace is + not already set + + >>> import pprint + + >>> class A: + ... def __repr__(self): + ... return 'A' + + >>> def test_function(): + ... import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() + ... pprint.pprint(A()) + + >>> reset_Breakpoint() + >>> with PdbTestInput([ # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE + ... 'b A.__repr__', + ... 'continue', + ... 'return', + ... 'next', + ... 'return', + ... 'return', + ... 'continue', + ... ]): + ... 
test_function() + > (2)test_function() + -> import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() + (Pdb) b A.__repr__ + Breakpoint 1 at :3 + (Pdb) continue + > (3)__repr__() + -> return 'A' + (Pdb) return + --Return-- + > (3)__repr__()->'A' + -> return 'A' + (Pdb) next + > ...pprint.py..._safe_repr() + -> return rep,... + (Pdb) return + --Return-- + > ...pprint.py..._safe_repr()->('A'...) + -> return rep,... + (Pdb) return + --Return-- + > ...pprint.py...format()->('A'...) + -> return... + (Pdb) continue + A + """ + + def test_pdb_skip_modules(): """This illustrates the simple case of module skipping. diff --git a/Lib/test/test_pkgutil.py b/Lib/test/test_pkgutil.py index e19dce1dbd2583..d095f440a99f63 100644 --- a/Lib/test/test_pkgutil.py +++ b/Lib/test/test_pkgutil.py @@ -13,6 +13,7 @@ import zipfile from test.support.import_helper import DirsOnSysPath +from test.support.os_helper import FakePath from test.test_importlib.util import uncache # Note: pkgutil.walk_packages is currently tested in test_runpy. This is @@ -121,7 +122,7 @@ def test_issue44061_iter_modules(self): # make sure iter_modules accepts Path objects names = [] - for moduleinfo in pkgutil.iter_modules([Path(zip_file)]): + for moduleinfo in pkgutil.iter_modules([FakePath(zip_file)]): self.assertIsInstance(moduleinfo, pkgutil.ModuleInfo) names.append(moduleinfo.name) self.assertEqual(names, [pkg]) diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py index 7e5f04c22bd6d3..908354cb8574d1 100644 --- a/Lib/test/test_posix.py +++ b/Lib/test/test_posix.py @@ -704,7 +704,8 @@ def test_makedev(self): self.assertEqual(posix.major(dev), major) self.assertRaises(TypeError, posix.major, float(dev)) self.assertRaises(TypeError, posix.major) - self.assertRaises((ValueError, OverflowError), posix.major, -1) + for x in -2, 2**64, -2**63-1: + self.assertRaises((ValueError, OverflowError), posix.major, x) minor = posix.minor(dev) self.assertIsInstance(minor, int) @@ -712,13 +713,23 @@ def test_makedev(self): self.assertEqual(posix.minor(dev), minor) self.assertRaises(TypeError, posix.minor, float(dev)) self.assertRaises(TypeError, posix.minor) - self.assertRaises((ValueError, OverflowError), posix.minor, -1) + for x in -2, 2**64, -2**63-1: + self.assertRaises((ValueError, OverflowError), posix.minor, x) self.assertEqual(posix.makedev(major, minor), dev) self.assertRaises(TypeError, posix.makedev, float(major), minor) self.assertRaises(TypeError, posix.makedev, major, float(minor)) self.assertRaises(TypeError, posix.makedev, major) self.assertRaises(TypeError, posix.makedev) + for x in -2, 2**32, 2**64, -2**63-1: + self.assertRaises((ValueError, OverflowError), posix.makedev, x, minor) + self.assertRaises((ValueError, OverflowError), posix.makedev, major, x) + + if sys.platform == 'linux': + NODEV = -1 + self.assertEqual(posix.major(NODEV), NODEV) + self.assertEqual(posix.minor(NODEV), NODEV) + self.assertEqual(posix.makedev(NODEV, NODEV), NODEV) def _test_all_chown_common(self, chown_func, first_param, stat_func): """Common code for chown, fchown and lchown tests.""" diff --git a/Lib/test/test_posixpath.py b/Lib/test/test_posixpath.py index 32a20efbb64e1d..57a24e9c70d5e5 100644 --- a/Lib/test/test_posixpath.py +++ b/Lib/test/test_posixpath.py @@ -1,3 +1,4 @@ +import inspect import os import posixpath import sys @@ -5,7 +6,7 @@ from posixpath import realpath, abspath, dirname, basename from test import test_genericpath from test.support import import_helper -from test.support import os_helper +from test.support import 
cpython_only, os_helper from test.support.os_helper import FakePath from unittest import mock @@ -283,6 +284,16 @@ def fake_lstat(path): def test_isjunction(self): self.assertFalse(posixpath.isjunction(ABSTFN)) + @unittest.skipIf(sys.platform == 'win32', "Fast paths are not for win32") + @cpython_only + def test_fast_paths_in_use(self): + # There are fast paths of these functions implemented in posixmodule.c. + # Confirm that they are being used, and not the Python fallbacks + self.assertTrue(os.path.splitroot is posix._path_splitroot_ex) + self.assertFalse(inspect.isfunction(os.path.splitroot)) + self.assertTrue(os.path.normpath is posix._path_normpath) + self.assertFalse(inspect.isfunction(os.path.normpath)) + def test_expanduser(self): self.assertEqual(posixpath.expanduser("foo"), "foo") self.assertEqual(posixpath.expanduser(b"foo"), b"foo") @@ -660,6 +671,24 @@ def test_realpath_resolve_first(self): safe_rmdir(ABSTFN + "/k") safe_rmdir(ABSTFN) + @os_helper.skip_unless_symlink + @skip_if_ABSTFN_contains_backslash + @unittest.skipIf(os.chmod not in os.supports_follow_symlinks, "Can't set symlink permissions") + @unittest.skipIf(sys.platform != "darwin", "only macOS requires read permission to readlink()") + def test_realpath_unreadable_symlink(self): + try: + os.symlink(ABSTFN+"1", ABSTFN) + os.chmod(ABSTFN, 0o000, follow_symlinks=False) + self.assertEqual(realpath(ABSTFN), ABSTFN) + self.assertEqual(realpath(ABSTFN + '/foo'), ABSTFN + '/foo') + self.assertEqual(realpath(ABSTFN + '/../foo'), dirname(ABSTFN) + '/foo') + self.assertEqual(realpath(ABSTFN + '/foo/..'), ABSTFN) + with self.assertRaises(PermissionError): + realpath(ABSTFN, strict=True) + finally: + os.chmod(ABSTFN, 0o755, follow_symlinks=False) + os.unlink(ABSTFN) + def test_relpath(self): (real_getcwd, os.getcwd) = (os.getcwd, lambda: r"/home/user/bar") try: diff --git a/Lib/test/test_pyrepl/__init__.py b/Lib/test/test_pyrepl/__init__.py new file mode 100644 index 00000000000000..8359d9844623c2 --- /dev/null +++ b/Lib/test/test_pyrepl/__init__.py @@ -0,0 +1,15 @@ +import os +import sys +from test.support import requires, load_package_tests +from test.support.import_helper import import_module + +if sys.platform != "win32": + # On non-Windows platforms, testing pyrepl currently requires that the + # 'curses' resource be given on the regrtest command line using the -u + # option. Additionally, we need to attempt to import curses and readline. 
+    requires("curses")
+    curses = import_module("curses")
+
+
+def load_tests(*args):
+    return load_package_tests(os.path.dirname(__file__), *args)
diff --git a/Lib/test/test_pyrepl/__main__.py b/Lib/test/test_pyrepl/__main__.py
new file mode 100644
index 00000000000000..cbe9e01d0df820
--- /dev/null
+++ b/Lib/test/test_pyrepl/__main__.py
@@ -0,0 +1,4 @@
+import unittest
+from test.test_pyrepl import load_tests
+
+unittest.main()
diff --git a/Lib/test/test_pyrepl/support.py b/Lib/test/test_pyrepl/support.py
new file mode 100644
index 00000000000000..70e12286f7d781
--- /dev/null
+++ b/Lib/test/test_pyrepl/support.py
@@ -0,0 +1,143 @@
+from code import InteractiveConsole
+from functools import partial
+from typing import Iterable
+from unittest.mock import MagicMock
+
+from _pyrepl.console import Console, Event
+from _pyrepl.readline import ReadlineAlikeReader, ReadlineConfig
+from _pyrepl.simple_interact import _strip_final_indent
+
+
+def multiline_input(reader: ReadlineAlikeReader, namespace: dict | None = None):
+    saved = reader.more_lines
+    try:
+        reader.more_lines = partial(more_lines, namespace=namespace)
+        reader.ps1 = reader.ps2 = ">>>"
+        reader.ps3 = reader.ps4 = "..."
+        return reader.readline()
+    finally:
+        reader.more_lines = saved
+        reader.paste_mode = False
+
+
+def more_lines(text: str, namespace: dict | None = None):
+    if namespace is None:
+        namespace = {}
+    src = _strip_final_indent(text)
+    console = InteractiveConsole(namespace, filename="<stdin>")
+    try:
+        code = console.compile(src, "<stdin>", "single")
+    except (OverflowError, SyntaxError, ValueError):
+        return False
+    else:
+        return code is None
+
+
+def code_to_events(code: str):
+    for c in code:
+        yield Event(evt="key", data=c, raw=bytearray(c.encode("utf-8")))
+
+
+def prepare_reader(console: Console, **kwargs):
+    config = ReadlineConfig(readline_completer=kwargs.pop("readline_completer", None))
+    reader = ReadlineAlikeReader(console=console, config=config)
+    reader.more_lines = partial(more_lines, namespace=None)
+    reader.paste_mode = True  # Avoid extra indents
+
+    def get_prompt(lineno, cursor_on_line) -> str:
+        return ""
+
+    reader.get_prompt = get_prompt  # Remove prompt for easier calculations of (x, y)
+
+    for key, val in kwargs.items():
+        setattr(reader, key, val)
+
+    return reader
+
+
+def prepare_console(events: Iterable[Event], **kwargs) -> MagicMock | Console:
+    console = MagicMock()
+    console.get_event.side_effect = events
+    console.height = 100
+    console.width = 80
+    for key, val in kwargs.items():
+        setattr(console, key, val)
+    return console
+
+
+def handle_all_events(
+    events, prepare_console=prepare_console, prepare_reader=prepare_reader
+):
+    console = prepare_console(events)
+    reader = prepare_reader(console)
+    try:
+        while True:
+            reader.handle1()
+    except StopIteration:
+        pass
+    except KeyboardInterrupt:
+        pass
+    return reader, console
+
+
+handle_events_narrow_console = partial(
+    handle_all_events,
+    prepare_console=partial(prepare_console, width=10),
+)
+
+
+class FakeConsole(Console):
+    def __init__(self, events, encoding="utf-8"):
+        self.events = iter(events)
+        self.encoding = encoding
+        self.screen = []
+        self.height = 100
+        self.width = 80
+
+    def get_event(self, block: bool = True) -> Event | None:
+        return next(self.events)
+
+    def getpending(self) -> Event:
+        return self.get_event(block=False)
+
+    def getheightwidth(self) -> tuple[int, int]:
+        return self.height, self.width
+
+    def refresh(self, screen: list[str], xy: tuple[int, int]) -> None:
+        pass
+
+    def prepare(self) -> None:
+        pass
+
+    def
restore(self) -> None: + pass + + def move_cursor(self, x: int, y: int) -> None: + pass + + def set_cursor_vis(self, visible: bool) -> None: + pass + + def push_char(self, char: int | bytes) -> None: + pass + + def beep(self) -> None: + pass + + def clear(self) -> None: + pass + + def finish(self) -> None: + pass + + def flushoutput(self) -> None: + pass + + def forgetinput(self) -> None: + pass + + def wait(self) -> None: + pass + + def repaint(self) -> None: + pass diff --git a/Lib/test/test_pyrepl/test_input.py b/Lib/test/test_pyrepl/test_input.py new file mode 100644 index 00000000000000..c78c876c2c4c2a --- /dev/null +++ b/Lib/test/test_pyrepl/test_input.py @@ -0,0 +1,102 @@ +import unittest + +from _pyrepl.console import Event +from _pyrepl.input import KeymapTranslator + + +class KeymapTranslatorTests(unittest.TestCase): + def test_push_single_key(self): + keymap = [("a", "command_a")] + translator = KeymapTranslator(keymap) + evt = Event("key", "a") + translator.push(evt) + result = translator.get() + self.assertEqual(result, ("command_a", ["a"])) + + def test_push_multiple_keys(self): + keymap = [("ab", "command_ab")] + translator = KeymapTranslator(keymap) + evt1 = Event("key", "a") + evt2 = Event("key", "b") + translator.push(evt1) + translator.push(evt2) + result = translator.get() + self.assertEqual(result, ("command_ab", ["a", "b"])) + + def test_push_invalid_key(self): + keymap = [("a", "command_a")] + translator = KeymapTranslator(keymap) + evt = Event("key", "b") + translator.push(evt) + result = translator.get() + self.assertEqual(result, (None, ["b"])) + + def test_push_invalid_key_with_stack(self): + keymap = [("ab", "command_ab")] + translator = KeymapTranslator(keymap) + evt1 = Event("key", "a") + evt2 = Event("key", "c") + translator.push(evt1) + translator.push(evt2) + result = translator.get() + self.assertEqual(result, (None, ["a", "c"])) + + def test_push_character_key(self): + keymap = [("a", "command_a")] + translator = KeymapTranslator(keymap) + evt = Event("key", "a") + translator.push(evt) + result = translator.get() + self.assertEqual(result, ("command_a", ["a"])) + + def test_push_character_key_with_stack(self): + keymap = [("ab", "command_ab")] + translator = KeymapTranslator(keymap) + evt1 = Event("key", "a") + evt2 = Event("key", "b") + evt3 = Event("key", "c") + translator.push(evt1) + translator.push(evt2) + translator.push(evt3) + result = translator.get() + self.assertEqual(result, ("command_ab", ["a", "b"])) + + def test_push_transition_key(self): + keymap = [("a", {"b": "command_ab"})] + translator = KeymapTranslator(keymap) + evt1 = Event("key", "a") + evt2 = Event("key", "b") + translator.push(evt1) + translator.push(evt2) + result = translator.get() + self.assertEqual(result, ("command_ab", ["a", "b"])) + + def test_push_transition_key_interrupted(self): + keymap = [("a", {"b": "command_ab"})] + translator = KeymapTranslator(keymap) + evt1 = Event("key", "a") + evt2 = Event("key", "c") + evt3 = Event("key", "b") + translator.push(evt1) + translator.push(evt2) + translator.push(evt3) + result = translator.get() + self.assertEqual(result, (None, ["a", "c"])) + + def test_push_invalid_key_with_unicode_category(self): + keymap = [("a", "command_a")] + translator = KeymapTranslator(keymap) + evt = Event("key", "\u0003") # Control character + translator.push(evt) + result = translator.get() + self.assertEqual(result, (None, ["\u0003"])) + + def test_empty(self): + keymap = [("a", "command_a")] + translator = KeymapTranslator(keymap) + 
self.assertTrue(translator.empty()) + evt = Event("key", "a") + translator.push(evt) + self.assertFalse(translator.empty()) + translator.get() + self.assertTrue(translator.empty()) diff --git a/Lib/test/test_pyrepl/test_interact.py b/Lib/test/test_pyrepl/test_interact.py new file mode 100644 index 00000000000000..df97b1354a168e --- /dev/null +++ b/Lib/test/test_pyrepl/test_interact.py @@ -0,0 +1,113 @@ +import contextlib +import io +import unittest +from unittest.mock import patch +from textwrap import dedent + +from test.support import force_not_colorized + +from _pyrepl.console import InteractiveColoredConsole + + +class TestSimpleInteract(unittest.TestCase): + def test_multiple_statements(self): + namespace = {} + code = dedent("""\ + class A: + def foo(self): + + + pass + + class B: + def bar(self): + pass + + a = 1 + a + """) + console = InteractiveColoredConsole(namespace, filename="") + f = io.StringIO() + with ( + patch.object(InteractiveColoredConsole, "showsyntaxerror") as showsyntaxerror, + patch.object(InteractiveColoredConsole, "runsource", wraps=console.runsource) as runsource, + contextlib.redirect_stdout(f), + ): + more = console.push(code, filename="", _symbol="single") # type: ignore[call-arg] + self.assertFalse(more) + showsyntaxerror.assert_not_called() + + + def test_multiple_statements_output(self): + namespace = {} + code = dedent("""\ + b = 1 + b + a = 1 + a + """) + console = InteractiveColoredConsole(namespace, filename="") + f = io.StringIO() + with contextlib.redirect_stdout(f): + more = console.push(code, filename="", _symbol="single") # type: ignore[call-arg] + self.assertFalse(more) + self.assertEqual(f.getvalue(), "1\n") + + def test_empty(self): + namespace = {} + code = "" + console = InteractiveColoredConsole(namespace, filename="") + f = io.StringIO() + with contextlib.redirect_stdout(f): + more = console.push(code, filename="", _symbol="single") # type: ignore[call-arg] + self.assertFalse(more) + self.assertEqual(f.getvalue(), "") + + def test_runsource_compiles_and_runs_code(self): + console = InteractiveColoredConsole() + source = "print('Hello, world!')" + with patch.object(console, "runcode") as mock_runcode: + console.runsource(source) + mock_runcode.assert_called_once() + + def test_runsource_returns_false_for_successful_compilation(self): + console = InteractiveColoredConsole() + source = "print('Hello, world!')" + f = io.StringIO() + with contextlib.redirect_stdout(f): + result = console.runsource(source) + self.assertFalse(result) + + @force_not_colorized + def test_runsource_returns_false_for_failed_compilation(self): + console = InteractiveColoredConsole() + source = "print('Hello, world!'" + f = io.StringIO() + with contextlib.redirect_stderr(f): + result = console.runsource(source) + self.assertFalse(result) + self.assertIn('SyntaxError', f.getvalue()) + + def test_runsource_shows_syntax_error_for_failed_compilation(self): + console = InteractiveColoredConsole() + source = "print('Hello, world!'" + with patch.object(console, "showsyntaxerror") as mock_showsyntaxerror: + console.runsource(source) + mock_showsyntaxerror.assert_called_once() + source = dedent("""\ + match 1: + case {0: _, 0j: _}: + pass + """) + with patch.object(console, "showsyntaxerror") as mock_showsyntaxerror: + console.runsource(source) + mock_showsyntaxerror.assert_called_once() + + def test_no_active_future(self): + console = InteractiveColoredConsole() + source = "x: int = 1; print(__annotations__)" + f = io.StringIO() + with contextlib.redirect_stdout(f): + result = 
console.runsource(source)
+        self.assertFalse(result)
+        self.assertEqual(f.getvalue(), "{'x': <class 'int'>}\n")
diff --git a/Lib/test/test_pyrepl/test_keymap.py b/Lib/test/test_pyrepl/test_keymap.py
new file mode 100644
index 00000000000000..2c97066b2c7043
--- /dev/null
+++ b/Lib/test/test_pyrepl/test_keymap.py
@@ -0,0 +1,120 @@
+import string
+import unittest
+
+from _pyrepl.keymap import _keynames, _escapes, parse_keys, compile_keymap, KeySpecError
+
+
+class TestParseKeys(unittest.TestCase):
+    def test_single_character(self):
+        """Ensure that single ascii characters or single digits are parsed as single characters."""
+        test_cases = [(key, [key]) for key in string.ascii_letters + string.digits]
+        for test_key, expected_keys in test_cases:
+            with self.subTest(f"{test_key} should be parsed as {expected_keys}"):
+                self.assertEqual(parse_keys(test_key), expected_keys)
+
+    def test_keynames(self):
+        """Ensure that keynames are parsed to their corresponding mapping.
+
+        A keyname is expected to be of the following form: \\<keyname> such as \\<left>
+        which would get parsed as "left".
+        """
+        test_cases = [(f"\\<{keyname}>", [parsed_keyname]) for keyname, parsed_keyname in _keynames.items()]
+        for test_key, expected_keys in test_cases:
+            with self.subTest(f"{test_key} should be parsed as {expected_keys}"):
+                self.assertEqual(parse_keys(test_key), expected_keys)
+
+    def test_escape_sequences(self):
+        """Ensure that escaping sequences are parsed to their corresponding mapping."""
+        test_cases = [(f"\\{escape}", [parsed_escape]) for escape, parsed_escape in _escapes.items()]
+        for test_key, expected_keys in test_cases:
+            with self.subTest(f"{test_key} should be parsed as {expected_keys}"):
+                self.assertEqual(parse_keys(test_key), expected_keys)
+
+    def test_control_sequences(self):
+        """Ensure that supported control sequences are parsed successfully."""
+        keys = ["@", "[", "]", "\\", "^", "_", "\\", "\\"]
+        keys.extend(string.ascii_letters)
+        test_cases = [(f"\\C-{key}", chr(ord(key) & 0x1F)) for key in []]
+        for test_key, expected_keys in test_cases:
+            with self.subTest(f"{test_key} should be parsed as {expected_keys}"):
+                self.assertEqual(parse_keys(test_key), expected_keys)
+
+    def test_meta_sequences(self):
+        self.assertEqual(parse_keys("\\M-a"), ["\033", "a"])
+        self.assertEqual(parse_keys("\\M-b"), ["\033", "b"])
+        self.assertEqual(parse_keys("\\M-c"), ["\033", "c"])
+
+    def test_combinations(self):
+        self.assertEqual(parse_keys("\\C-a\\n\\<up>"), ["\x01", "\n", "up"])
+        self.assertEqual(parse_keys("\\M-a\\t\\<down>"), ["\033", "a", "\t", "down"])
+
+    def test_keyspec_errors(self):
+        cases = [
+            ("\\Ca", "\\C must be followed by `-'"),
+            ("\\ca", "\\C must be followed by `-'"),
+            ("\\C-\\C-", "doubled \\C-"),
+            ("\\Ma", "\\M must be followed by `-'"),
+            ("\\ma", "\\M must be followed by `-'"),
+            ("\\M-\\M-", "doubled \\M-"),
+            ("\\", "unrecognised keyname"),
+            ("\\大", "unknown backslash escape"),
+            ("\\C-\\", "\\C- followed by invalid key")
+        ]
+        for test_keys, expected_err in cases:
+            with self.subTest(f"{test_keys} should give error {expected_err}"):
+                with self.assertRaises(KeySpecError) as e:
+                    parse_keys(test_keys)
+                self.assertIn(expected_err, str(e.exception))
+
+    def test_index_errors(self):
+        test_cases = ["\\", "\\C", "\\C-\\C"]
+        for test_keys in test_cases:
+            with self.assertRaises(IndexError):
+                parse_keys(test_keys)
+
+
+class TestCompileKeymap(unittest.TestCase):
+    def test_empty_keymap(self):
+        keymap = {}
+        result = compile_keymap(keymap)
+        self.assertEqual(result, {})
+
+    def test_single_keymap(self):
+        keymap
= {b"a": "action"} + result = compile_keymap(keymap) + self.assertEqual(result, {b"a": "action"}) + + def test_nested_keymap(self): + keymap = {b"a": {b"b": "action"}} + result = compile_keymap(keymap) + self.assertEqual(result, {b"a": {b"b": "action"}}) + + def test_empty_value(self): + keymap = {b"a": {b"": "action"}} + result = compile_keymap(keymap) + self.assertEqual(result, {b"a": {b"": "action"}}) + + def test_multiple_empty_values(self): + keymap = {b"a": {b"": "action1", b"b": "action2"}} + result = compile_keymap(keymap) + self.assertEqual(result, {b"a": {b"": "action1", b"b": "action2"}}) + + def test_multiple_keymaps(self): + keymap = {b"a": {b"b": "action1", b"c": "action2"}} + result = compile_keymap(keymap) + self.assertEqual(result, {b"a": {b"b": "action1", b"c": "action2"}}) + + def test_nested_multiple_keymaps(self): + keymap = {b"a": {b"b": {b"c": "action"}}} + result = compile_keymap(keymap) + self.assertEqual(result, {b"a": {b"b": {b"c": "action"}}}) + + def test_clashing_definitions(self): + km = {b'a': 'c', b'a' + b'b': 'd'} + with self.assertRaises(KeySpecError): + compile_keymap(km) + + def test_non_bytes_key(self): + with self.assertRaises(TypeError): + compile_keymap({123: 'a'}) diff --git a/Lib/test/test_pyrepl.py b/Lib/test/test_pyrepl/test_pyrepl.py similarity index 58% rename from Lib/test/test_pyrepl.py rename to Lib/test/test_pyrepl/test_pyrepl.py index c8990b699b214c..45114e7315749f 100644 --- a/Lib/test/test_pyrepl.py +++ b/Lib/test/test_pyrepl/test_pyrepl.py @@ -1,176 +1,35 @@ import itertools +import io import os import rlcompleter -import sys -import tempfile -import unittest -from code import InteractiveConsole -from functools import partial from unittest import TestCase -from unittest.mock import MagicMock, patch - -from test.support import requires -from test.support.import_helper import import_module - -# Optionally test pyrepl. This currently requires that the -# 'curses' resource be given on the regrtest command line using the -u -# option. Additionally, we need to attempt to import curses and readline. -requires("curses") -curses = import_module("curses") -readline = import_module("readline") - -from _pyrepl.console import Console, Event -from _pyrepl.readline import ReadlineAlikeReader, ReadlineConfig -from _pyrepl.simple_interact import _strip_final_indent -from _pyrepl.unix_eventqueue import EventQueue -from _pyrepl.simple_interact import InteractiveColoredConsole - - -def more_lines(unicodetext, namespace=None): - if namespace is None: - namespace = {} - src = _strip_final_indent(unicodetext) - console = InteractiveConsole(namespace, filename="") - try: - code = console.compile(src, "", "single") - except (OverflowError, SyntaxError, ValueError): - return False - else: - return code is None - - -def multiline_input(reader, namespace=None): - saved = reader.more_lines - try: - reader.more_lines = partial(more_lines, namespace=namespace) - reader.ps1 = reader.ps2 = ">>>" - reader.ps3 = reader.ps4 = "..." 
- return reader.readline() - finally: - reader.more_lines = saved - reader.paste_mode = False - - -def code_to_events(code): - for c in code: - yield Event(evt="key", data=c, raw=bytearray(c.encode("utf-8"))) - - -def prepare_mock_console(events, **kwargs): - console = MagicMock() - console.get_event.side_effect = events - console.height = 100 - console.width = 80 - for key, val in kwargs.items(): - setattr(console, key, val) - return console - - -def prepare_fake_console(**kwargs): - console = FakeConsole() - for key, val in kwargs.items(): - setattr(console, key, val) - return console - - -def prepare_reader(console, **kwargs): - config = ReadlineConfig(readline_completer=None) - reader = ReadlineAlikeReader(console=console, config=config) - reader.more_lines = partial(more_lines, namespace=None) - reader.paste_mode = True # Avoid extra indents - - def get_prompt(lineno, cursor_on_line) -> str: - return "" - - reader.get_prompt = get_prompt # Remove prompt for easier calculations of (x, y) - - for key, val in kwargs.items(): - setattr(reader, key, val) - - return reader - - -def handle_all_events( - events, prepare_console=prepare_mock_console, prepare_reader=prepare_reader -): - console = prepare_console(events) - reader = prepare_reader(console) - try: - while True: - reader.handle1() - except StopIteration: - pass - return reader, console - - -handle_events_narrow_console = partial( - handle_all_events, prepare_console=partial(prepare_mock_console, width=10) +from unittest.mock import patch + +from .support import ( + FakeConsole, + handle_all_events, + handle_events_narrow_console, + more_lines, + multiline_input, + code_to_events, ) - - -class FakeConsole(Console): - def __init__(self, events, encoding="utf-8"): - self.events = iter(events) - self.encoding = encoding - self.screen = [] - self.height = 100 - self.width = 80 - - def get_event(self, block: bool = True) -> Event | None: - return next(self.events) - - def getpending(self) -> Event: - return self.get_event(block=False) - - def getheightwidth(self) -> tuple[int, int]: - return self.height, self.width - - def refresh(self, screen: list[str], xy: tuple[int, int]) -> None: - pass - - def prepare(self) -> None: - pass - - def restore(self) -> None: - pass - - def move_cursor(self, x: int, y: int) -> None: - pass - - def set_cursor_vis(self, visible: bool) -> None: - pass - - def push_char(self, char: int | bytes) -> None: - pass - - def beep(self) -> None: - pass - - def clear(self) -> None: - pass - - def finish(self) -> None: - pass - - def flushoutput(self) -> None: - pass - - def forgetinput(self) -> None: - pass - - def wait(self) -> None: - pass - - def repaint(self) -> None: - pass +from _pyrepl.console import Event +from _pyrepl.readline import ReadlineAlikeReader, ReadlineConfig +from _pyrepl.readline import multiline_input as readline_multiline_input class TestCursorPosition(TestCase): + def prepare_reader(self, events): + console = FakeConsole(events) + config = ReadlineConfig(readline_completer=None) + reader = ReadlineAlikeReader(console=console, config=config) + return reader + def test_up_arrow_simple(self): # fmt: off code = ( - 'def f():\n' - ' ...\n' + "def f():\n" + " ...\n" ) # fmt: on events = itertools.chain( @@ -187,8 +46,8 @@ def test_up_arrow_simple(self): def test_down_arrow_end_of_input(self): # fmt: off code = ( - 'def f():\n' - ' ...\n' + "def f():\n" + " ...\n" ) # fmt: on events = itertools.chain( @@ -454,6 +313,158 @@ def test_cursor_position_after_wrap_and_move_up(self): 
self.assertEqual(reader.cxy, (1, 1)) +class TestPyReplAutoindent(TestCase): + def prepare_reader(self, events): + console = FakeConsole(events) + config = ReadlineConfig(readline_completer=None) + reader = ReadlineAlikeReader(console=console, config=config) + return reader + + def test_auto_indent_default(self): + # fmt: off + input_code = ( + "def f():\n" + "pass\n\n" + ) + + output_code = ( + "def f():\n" + " pass\n" + " " + ) + # fmt: on + + def test_auto_indent_continuation(self): + # auto indenting according to previous user indentation + # fmt: off + events = itertools.chain( + code_to_events("def f():\n"), + # add backspace to delete default auto-indent + [ + Event(evt="key", data="backspace", raw=bytearray(b"\x7f")), + ], + code_to_events( + " pass\n" + "pass\n\n" + ), + ) + + output_code = ( + "def f():\n" + " pass\n" + " pass\n" + " " + ) + # fmt: on + + reader = self.prepare_reader(events) + output = multiline_input(reader) + self.assertEqual(output, output_code) + + def test_auto_indent_prev_block(self): + # auto indenting according to indentation in different block + # fmt: off + events = itertools.chain( + code_to_events("def f():\n"), + # add backspace to delete default auto-indent + [ + Event(evt="key", data="backspace", raw=bytearray(b"\x7f")), + ], + code_to_events( + " pass\n" + "pass\n\n" + ), + code_to_events( + "def g():\n" + "pass\n\n" + ), + ) + + output_code = ( + "def g():\n" + " pass\n" + " " + ) + # fmt: on + + reader = self.prepare_reader(events) + output1 = multiline_input(reader) + output2 = multiline_input(reader) + self.assertEqual(output2, output_code) + + def test_auto_indent_multiline(self): + # fmt: off + events = itertools.chain( + code_to_events( + "def f():\n" + "pass" + ), + [ + # go to the end of the first line + Event(evt="key", data="up", raw=bytearray(b"\x1bOA")), + Event(evt="key", data="\x05", raw=bytearray(b"\x1bO5")), + # new line should be autoindented + Event(evt="key", data="\n", raw=bytearray(b"\n")), + ], + code_to_events( + "pass" + ), + [ + # go to end of last line + Event(evt="key", data="down", raw=bytearray(b"\x1bOB")), + Event(evt="key", data="\x05", raw=bytearray(b"\x1bO5")), + # double newline to terminate the block + Event(evt="key", data="\n", raw=bytearray(b"\n")), + Event(evt="key", data="\n", raw=bytearray(b"\n")), + ], + ) + + output_code = ( + "def f():\n" + " pass\n" + " pass\n" + " " + ) + # fmt: on + + reader = self.prepare_reader(events) + output = multiline_input(reader) + self.assertEqual(output, output_code) + + def test_auto_indent_with_comment(self): + # fmt: off + events = code_to_events( + "def f(): # foo\n" + "pass\n\n" + ) + + output_code = ( + "def f(): # foo\n" + " pass\n" + " " + ) + # fmt: on + + reader = self.prepare_reader(events) + output = multiline_input(reader) + self.assertEqual(output, output_code) + + def test_auto_indent_ignore_comments(self): + # fmt: off + events = code_to_events( + "pass #:\n" + ) + + output_code = ( + "pass #:" + ) + # fmt: on + + reader = self.prepare_reader(events) + output = multiline_input(reader) + self.assertEqual(output, output_code) + + class TestPyReplOutput(TestCase): def prepare_reader(self, events): console = FakeConsole(events) @@ -469,27 +480,34 @@ def test_basic(self): def test_multiline_edit(self): events = itertools.chain( - code_to_events("def f():\n ...\n\n"), + code_to_events("def f():\n...\n\n"), [ Event(evt="key", data="up", raw=bytearray(b"\x1bOA")), Event(evt="key", data="up", raw=bytearray(b"\x1bOA")), - Event(evt="key", data="up", 
raw=bytearray(b"\x1bOA")), - Event(evt="key", data="right", raw=bytearray(b"\x1bOC")), - Event(evt="key", data="right", raw=bytearray(b"\x1bOC")), - Event(evt="key", data="right", raw=bytearray(b"\x1bOC")), - Event(evt="key", data="backspace", raw=bytearray(b"\x7f")), + Event(evt="key", data="left", raw=bytearray(b"\x1bOD")), + Event(evt="key", data="left", raw=bytearray(b"\x1bOD")), + Event(evt="key", data="left", raw=bytearray(b"\x1bOD")), + Event(evt="key", data="backspace", raw=bytearray(b"\x08")), Event(evt="key", data="g", raw=bytearray(b"g")), Event(evt="key", data="down", raw=bytearray(b"\x1bOB")), - Event(evt="key", data="down", raw=bytearray(b"\x1bOB")), + Event(evt="key", data="backspace", raw=bytearray(b"\x08")), + Event(evt="key", data="delete", raw=bytearray(b"\x7F")), + Event(evt="key", data="right", raw=bytearray(b"g")), + Event(evt="key", data="backspace", raw=bytearray(b"\x08")), + Event(evt="key", data="p", raw=bytearray(b"p")), + Event(evt="key", data="a", raw=bytearray(b"a")), + Event(evt="key", data="s", raw=bytearray(b"s")), + Event(evt="key", data="s", raw=bytearray(b"s")), + Event(evt="key", data="\n", raw=bytearray(b"\n")), Event(evt="key", data="\n", raw=bytearray(b"\n")), ], ) reader = self.prepare_reader(events) output = multiline_input(reader) - self.assertEqual(output, "def f():\n ...\n ") + self.assertEqual(output, "def f():\n ...\n ") output = multiline_input(reader) - self.assertEqual(output, "def g():\n ...\n ") + self.assertEqual(output, "def g():\n pass\n ") def test_history_navigation_with_up_arrow(self): events = itertools.chain( @@ -569,14 +587,15 @@ def prepare_reader(self, events, namespace): reader = ReadlineAlikeReader(console=console, config=config) return reader + @patch("rlcompleter._readline_available", False) def test_simple_completion(self): - events = code_to_events("os.geten\t\n") + events = code_to_events("os.getpid\t\n") namespace = {"os": os} reader = self.prepare_reader(events, namespace) output = multiline_input(reader, namespace) - self.assertEqual(output, "os.getenv") + self.assertEqual(output, "os.getpid()") def test_completion_with_many_options(self): # Test with something that initially displays many options @@ -607,112 +626,49 @@ def test_global_namespace_completion(self): output = multiline_input(reader, namespace) self.assertEqual(output, "python") + def test_updown_arrow_with_completion_menu(self): + """Up arrow in the middle of unfinished tab completion when the menu is displayed + should work and trigger going back in history. 
Down arrow should subsequently + get us back to the incomplete command.""" + code = "import os\nos.\t\t" + namespace = {"os": os} -@patch("_pyrepl.curses.tigetstr", lambda x: b"") -class TestUnivEventQueue(TestCase): - def setUp(self): - self.file = tempfile.TemporaryFile() - - def tearDown(self) -> None: - self.file.close() - - def test_get(self): - eq = EventQueue(self.file.fileno(), "utf-8") - event = Event("key", "a", b"a") - eq.insert(event) - self.assertEqual(eq.get(), event) - - def test_empty(self): - eq = EventQueue(self.file.fileno(), "utf-8") - self.assertTrue(eq.empty()) - eq.insert(Event("key", "a", b"a")) - self.assertFalse(eq.empty()) - - def test_flush_buf(self): - eq = EventQueue(self.file.fileno(), "utf-8") - eq.buf.extend(b"test") - self.assertEqual(eq.flush_buf(), b"test") - self.assertEqual(eq.buf, bytearray()) - - def test_insert(self): - eq = EventQueue(self.file.fileno(), "utf-8") - event = Event("key", "a", b"a") - eq.insert(event) - self.assertEqual(eq.events[0], event) - - @patch("_pyrepl.unix_eventqueue.keymap") - def test_push_with_key_in_keymap(self, mock_keymap): - mock_keymap.compile_keymap.return_value = {"a": "b"} - eq = EventQueue(self.file.fileno(), "utf-8") - eq.keymap = {b"a": "b"} - eq.push("a") - mock_keymap.compile_keymap.assert_called() - self.assertEqual(eq.events[0].evt, "key") - self.assertEqual(eq.events[0].data, "b") - - @patch("_pyrepl.unix_eventqueue.keymap") - def test_push_without_key_in_keymap(self, mock_keymap): - mock_keymap.compile_keymap.return_value = {"a": "b"} - eq = EventQueue(self.file.fileno(), "utf-8") - eq.keymap = {b"c": "d"} - eq.push("a") - mock_keymap.compile_keymap.assert_called() - self.assertEqual(eq.events[0].evt, "key") - self.assertEqual(eq.events[0].data, "a") - - @patch("_pyrepl.unix_eventqueue.keymap") - def test_push_with_keymap_in_keymap(self, mock_keymap): - mock_keymap.compile_keymap.return_value = {"a": "b"} - eq = EventQueue(self.file.fileno(), "utf-8") - eq.keymap = {b"a": {b"b": "c"}} - eq.push("a") - mock_keymap.compile_keymap.assert_called() - self.assertTrue(eq.empty()) - eq.push("b") - self.assertEqual(eq.events[0].evt, "key") - self.assertEqual(eq.events[0].data, "c") - eq.push("d") - self.assertEqual(eq.events[1].evt, "key") - self.assertEqual(eq.events[1].data, "d") - - @patch("_pyrepl.unix_eventqueue.keymap") - def test_push_with_keymap_in_keymap_and_escape(self, mock_keymap): - mock_keymap.compile_keymap.return_value = {"a": "b"} - eq = EventQueue(self.file.fileno(), "utf-8") - eq.keymap = {b"a": {b"b": "c"}} - eq.push("a") - mock_keymap.compile_keymap.assert_called() - self.assertTrue(eq.empty()) - eq.flush_buf() - eq.push("\033") - self.assertEqual(eq.events[0].evt, "key") - self.assertEqual(eq.events[0].data, "\033") - eq.push("b") - self.assertEqual(eq.events[1].evt, "key") - self.assertEqual(eq.events[1].data, "b") - - def test_push_special_key(self): - eq = EventQueue(self.file.fileno(), "utf-8") - eq.keymap = {} - eq.push("\x1b") - eq.push("[") - eq.push("A") - self.assertEqual(eq.events[0].evt, "key") - self.assertEqual(eq.events[0].data, "\x1b") - - def test_push_unrecognized_escape_sequence(self): - eq = EventQueue(self.file.fileno(), "utf-8") - eq.keymap = {} - eq.push("\x1b") - eq.push("[") - eq.push("Z") - self.assertEqual(len(eq.events), 3) - self.assertEqual(eq.events[0].evt, "key") - self.assertEqual(eq.events[0].data, "\x1b") - self.assertEqual(eq.events[1].evt, "key") - self.assertEqual(eq.events[1].data, "[") - self.assertEqual(eq.events[2].evt, "key") - 
self.assertEqual(eq.events[2].data, "Z") + events = itertools.chain( + code_to_events(code), + [ + Event(evt="key", data="up", raw=bytearray(b"\x1bOA")), + Event(evt="key", data="down", raw=bytearray(b"\x1bOB")), + ], + code_to_events("\n"), + ) + reader = self.prepare_reader(events, namespace=namespace) + output = multiline_input(reader, namespace) + # This is the first line, nothing to see here + self.assertEqual(output, "import os") + # This is the second line. We pressed up and down arrows + # so we should end up where we were when we initiated tab completion. + output = multiline_input(reader, namespace) + self.assertEqual(output, "os.") + + @patch("_pyrepl.readline._ReadlineWrapper.get_reader") + @patch("sys.stderr", new_callable=io.StringIO) + def test_completion_with_warnings(self, mock_stderr, mock_get_reader): + class Dummy: + @property + def test_func(self): + import warnings + + warnings.warn("warnings\n") + return None + + dummy = Dummy() + events = code_to_events("dummy.test_func.\t\n\n") + namespace = {"dummy": dummy} + reader = self.prepare_reader(events, namespace) + mock_get_reader.return_value = reader + output = readline_multiline_input(more_lines, ">>>", "...") + self.assertEqual(output, "dummy.test_func.__") + self.assertEqual(mock_stderr.getvalue(), "") class TestPasteEvent(TestCase): @@ -725,12 +681,12 @@ def prepare_reader(self, events): def test_paste(self): # fmt: off code = ( - 'def a():\n' - ' for x in range(10):\n' - ' if x%2:\n' - ' print(x)\n' - ' else:\n' - ' pass\n' + "def a():\n" + " for x in range(10):\n" + " if x%2:\n" + " print(x)\n" + " else:\n" + " pass\n" ) # fmt: on @@ -751,10 +707,10 @@ def test_paste(self): def test_paste_mid_newlines(self): # fmt: off code = ( - 'def f():\n' - ' x = y\n' - ' \n' - ' y = z\n' + "def f():\n" + " x = y\n" + " \n" + " y = z\n" ) # fmt: on @@ -775,16 +731,16 @@ def test_paste_mid_newlines(self): def test_paste_mid_newlines_not_in_paste_mode(self): # fmt: off code = ( - 'def f():\n' - ' x = y\n' - ' \n' - ' y = z\n\n' + "def f():\n" + "x = y\n" + "\n" + "y = z\n\n" ) expected = ( - 'def f():\n' - ' x = y\n' - ' ' + "def f():\n" + " x = y\n" + " " ) # fmt: on @@ -796,20 +752,20 @@ def test_paste_mid_newlines_not_in_paste_mode(self): def test_paste_not_in_paste_mode(self): # fmt: off input_code = ( - 'def a():\n' - ' for x in range(10):\n' - ' if x%2:\n' - ' print(x)\n' - ' else:\n' - ' pass\n\n' + "def a():\n" + "for x in range(10):\n" + "if x%2:\n" + "print(x)\n" + "else:\n" + "pass\n\n" ) output_code = ( - 'def a():\n' - ' for x in range(10):\n' - ' if x%2:\n' - ' print(x)\n' - ' else:' + "def a():\n" + " for x in range(10):\n" + " if x%2:\n" + " print(x)\n" + " else:" ) # fmt: on @@ -822,25 +778,25 @@ def test_bracketed_paste(self): """Test that bracketed paste using \x1b[200~ and \x1b[201~ works.""" # fmt: off input_code = ( - 'def a():\n' - ' for x in range(10):\n' - '\n' - ' if x%2:\n' - ' print(x)\n' - '\n' - ' else:\n' - ' pass\n' + "def a():\n" + " for x in range(10):\n" + "\n" + " if x%2:\n" + " print(x)\n" + "\n" + " else:\n" + " pass\n" ) output_code = ( - 'def a():\n' - ' for x in range(10):\n' - '\n' - ' if x%2:\n' - ' print(x)\n' - '\n' - ' else:\n' - ' pass\n' + "def a():\n" + " for x in range(10):\n" + "\n" + " if x%2:\n" + " print(x)\n" + "\n" + " else:\n" + " pass\n" ) # fmt: on @@ -872,135 +828,3 @@ def test_bracketed_paste_single_line(self): reader = self.prepare_reader(events) output = multiline_input(reader) self.assertEqual(output, input_code) - - -class TestReader(TestCase): - def 
assert_screen_equals(self, reader, expected): - actual = reader.calc_screen() - expected = expected.split("\n") - self.assertListEqual(actual, expected) - - def test_calc_screen_wrap_simple(self): - events = code_to_events(10 * "a") - reader, _ = handle_events_narrow_console(events) - self.assert_screen_equals(reader, f"{9*"a"}\\\na") - - def test_calc_screen_wrap_wide_characters(self): - events = code_to_events(8 * "a" + "樂") - reader, _ = handle_events_narrow_console(events) - self.assert_screen_equals(reader, f"{8*"a"}\\\n樂") - - def test_calc_screen_wrap_three_lines(self): - events = code_to_events(20 * "a") - reader, _ = handle_events_narrow_console(events) - self.assert_screen_equals(reader, f"{9*"a"}\\\n{9*"a"}\\\naa") - - def test_calc_screen_wrap_three_lines_mixed_character(self): - # fmt: off - code = ( - "def f():\n" - f" {8*"a"}\n" - f" {5*"樂"}" - ) - # fmt: on - - events = code_to_events(code) - reader, _ = handle_events_narrow_console(events) - - # fmt: off - self.assert_screen_equals(reader, ( - "def f():\n" - f" {7*"a"}\\\n" - "a\n" - f" {3*"樂"}\\\n" - "樂樂" - )) - # fmt: on - - def test_calc_screen_backspace(self): - events = itertools.chain( - code_to_events("aaa"), - [ - Event(evt="key", data="backspace", raw=bytearray(b"\x7f")), - ], - ) - reader, _ = handle_all_events(events) - self.assert_screen_equals(reader, "aa") - - def test_calc_screen_wrap_removes_after_backspace(self): - events = itertools.chain( - code_to_events(10 * "a"), - [ - Event(evt="key", data="backspace", raw=bytearray(b"\x7f")), - ], - ) - reader, _ = handle_events_narrow_console(events) - self.assert_screen_equals(reader, 9 * "a") - - def test_calc_screen_backspace_in_second_line_after_wrap(self): - events = itertools.chain( - code_to_events(11 * "a"), - [ - Event(evt="key", data="backspace", raw=bytearray(b"\x7f")), - ], - ) - reader, _ = handle_events_narrow_console(events) - self.assert_screen_equals(reader, f"{9*"a"}\\\na") - - def test_setpos_for_xy_simple(self): - events = code_to_events("11+11") - reader, _ = handle_all_events(events) - reader.setpos_from_xy(0, 0) - self.assertEqual(reader.pos, 0) - - def test_setpos_from_xy_multiple_lines(self): - # fmt: off - code = ( - "def foo():\n" - " return 1" - ) - # fmt: on - - events = code_to_events(code) - reader, _ = handle_all_events(events) - reader.setpos_from_xy(2, 1) - self.assertEqual(reader.pos, 13) - - def test_setpos_from_xy_after_wrap(self): - # fmt: off - code = ( - "def foo():\n" - " hello" - ) - # fmt: on - - events = code_to_events(code) - reader, _ = handle_events_narrow_console(events) - reader.setpos_from_xy(2, 2) - self.assertEqual(reader.pos, 13) - - def test_setpos_fromxy_in_wrapped_line(self): - # fmt: off - code = ( - "def foo():\n" - " hello" - ) - # fmt: on - - events = code_to_events(code) - reader, _ = handle_events_narrow_console(events) - reader.setpos_from_xy(0, 1) - self.assertEqual(reader.pos, 9) - - def test_up_arrow_after_ctrl_r(self): - events = iter([ - Event(evt='key', data='\x12', raw=bytearray(b'\x12')), - Event(evt='key', data='up', raw=bytearray(b'\x1bOA')), - ]) - - reader, _ = handle_all_events(events) - self.assert_screen_equals(reader, "") - - -if __name__ == '__main__': - unittest.main() diff --git a/Lib/test/test_pyrepl/test_reader.py b/Lib/test/test_pyrepl/test_reader.py new file mode 100644 index 00000000000000..78b11323d60a85 --- /dev/null +++ b/Lib/test/test_pyrepl/test_reader.py @@ -0,0 +1,278 @@ +import itertools +import functools +import rlcompleter +from unittest import TestCase +from 
unittest.mock import MagicMock, patch + +from .support import handle_all_events, handle_events_narrow_console, code_to_events, prepare_reader +from test.support import import_helper +from _pyrepl.console import Event +from _pyrepl.reader import Reader + + +class TestReader(TestCase): + def assert_screen_equals(self, reader, expected): + actual = reader.screen + expected = expected.split("\n") + self.assertListEqual(actual, expected) + + def test_calc_screen_wrap_simple(self): + events = code_to_events(10 * "a") + reader, _ = handle_events_narrow_console(events) + self.assert_screen_equals(reader, f"{9*"a"}\\\na") + + def test_calc_screen_wrap_wide_characters(self): + events = code_to_events(8 * "a" + "樂") + reader, _ = handle_events_narrow_console(events) + self.assert_screen_equals(reader, f"{8*"a"}\\\n樂") + + def test_calc_screen_wrap_three_lines(self): + events = code_to_events(20 * "a") + reader, _ = handle_events_narrow_console(events) + self.assert_screen_equals(reader, f"{9*"a"}\\\n{9*"a"}\\\naa") + + def test_calc_screen_wrap_three_lines_mixed_character(self): + # fmt: off + code = ( + "def f():\n" + f" {8*"a"}\n" + f" {5*"樂"}" + ) + # fmt: on + + events = code_to_events(code) + reader, _ = handle_events_narrow_console(events) + + # fmt: off + self.assert_screen_equals(reader, ( + "def f():\n" + f" {7*"a"}\\\n" + "a\n" + f" {3*"樂"}\\\n" + "樂樂" + )) + # fmt: on + + def test_calc_screen_backspace(self): + events = itertools.chain( + code_to_events("aaa"), + [ + Event(evt="key", data="backspace", raw=bytearray(b"\x7f")), + ], + ) + reader, _ = handle_all_events(events) + self.assert_screen_equals(reader, "aa") + + def test_calc_screen_wrap_removes_after_backspace(self): + events = itertools.chain( + code_to_events(10 * "a"), + [ + Event(evt="key", data="backspace", raw=bytearray(b"\x7f")), + ], + ) + reader, _ = handle_events_narrow_console(events) + self.assert_screen_equals(reader, 9 * "a") + + def test_calc_screen_backspace_in_second_line_after_wrap(self): + events = itertools.chain( + code_to_events(11 * "a"), + [ + Event(evt="key", data="backspace", raw=bytearray(b"\x7f")), + ], + ) + reader, _ = handle_events_narrow_console(events) + self.assert_screen_equals(reader, f"{9*"a"}\\\na") + + def test_setpos_for_xy_simple(self): + events = code_to_events("11+11") + reader, _ = handle_all_events(events) + reader.setpos_from_xy(0, 0) + self.assertEqual(reader.pos, 0) + + def test_setpos_from_xy_multiple_lines(self): + # fmt: off + code = ( + "def foo():\n" + " return 1" + ) + # fmt: on + + events = code_to_events(code) + reader, _ = handle_all_events(events) + reader.setpos_from_xy(2, 1) + self.assertEqual(reader.pos, 13) + + def test_setpos_from_xy_after_wrap(self): + # fmt: off + code = ( + "def foo():\n" + " hello" + ) + # fmt: on + + events = code_to_events(code) + reader, _ = handle_events_narrow_console(events) + reader.setpos_from_xy(2, 2) + self.assertEqual(reader.pos, 13) + + def test_setpos_fromxy_in_wrapped_line(self): + # fmt: off + code = ( + "def foo():\n" + " hello" + ) + # fmt: on + + events = code_to_events(code) + reader, _ = handle_events_narrow_console(events) + reader.setpos_from_xy(0, 1) + self.assertEqual(reader.pos, 9) + + def test_up_arrow_after_ctrl_r(self): + events = iter( + [ + Event(evt="key", data="\x12", raw=bytearray(b"\x12")), + Event(evt="key", data="up", raw=bytearray(b"\x1bOA")), + ] + ) + + reader, _ = handle_all_events(events) + self.assert_screen_equals(reader, "") + + def test_newline_within_block_trailing_whitespace(self): + # fmt: off + code = 
( + "def foo():\n" + "a = 1\n" + ) + # fmt: on + + events = itertools.chain( + code_to_events(code), + [ + # go to the end of the first line + Event(evt="key", data="up", raw=bytearray(b"\x1bOA")), + Event(evt="key", data="up", raw=bytearray(b"\x1bOA")), + Event(evt="key", data="\x05", raw=bytearray(b"\x1bO5")), + # new lines in-block shouldn't terminate the block + Event(evt="key", data="\n", raw=bytearray(b"\n")), + Event(evt="key", data="\n", raw=bytearray(b"\n")), + # end of line 2 + Event(evt="key", data="down", raw=bytearray(b"\x1bOB")), + Event(evt="key", data="\x05", raw=bytearray(b"\x1bO5")), + # a double new line in-block should terminate the block + # even if its followed by whitespace + Event(evt="key", data="\n", raw=bytearray(b"\n")), + Event(evt="key", data="\n", raw=bytearray(b"\n")), + ], + ) + + no_paste_reader = functools.partial(prepare_reader, paste_mode=False) + reader, _ = handle_all_events(events, prepare_reader=no_paste_reader) + + expected = ( + "def foo():\n" + " \n" + " \n" + " a = 1\n" + " \n" + " " # HistoricalReader will trim trailing whitespace + ) + self.assert_screen_equals(reader, expected) + self.assertTrue(reader.finished) + + def test_input_hook_is_called_if_set(self): + input_hook = MagicMock() + def _prepare_console(events): + console = MagicMock() + console.get_event.side_effect = events + console.height = 100 + console.width = 80 + console.input_hook = input_hook + return console + + events = code_to_events("a") + reader, _ = handle_all_events(events, prepare_console=_prepare_console) + + self.assertEqual(len(input_hook.mock_calls), 4) + + def test_keyboard_interrupt_clears_screen(self): + namespace = {"itertools": itertools} + code = "import itertools\nitertools." + events = itertools.chain(code_to_events(code), [ + Event(evt='key', data='\t', raw=bytearray(b'\t')), # Two tabs for completion + Event(evt='key', data='\t', raw=bytearray(b'\t')), + Event(evt='key', data='\x03', raw=bytearray(b'\x03')), # Ctrl-C + ]) + + completing_reader = functools.partial( + prepare_reader, + readline_completer=rlcompleter.Completer(namespace).complete + ) + reader, _ = handle_all_events(events, prepare_reader=completing_reader) + self.assertEqual(reader.calc_screen(), code.split("\n")) + + def test_prompt_length(self): + # Handles simple ASCII prompt + ps1 = ">>> " + prompt, l = Reader.process_prompt(ps1) + self.assertEqual(prompt, ps1) + self.assertEqual(l, 4) + + # Handles ANSI escape sequences + ps1 = "\033[0;32m>>> \033[0m" + prompt, l = Reader.process_prompt(ps1) + self.assertEqual(prompt, "\033[0;32m>>> \033[0m") + self.assertEqual(l, 4) + + # Handles ANSI escape sequences bracketed in \001 .. \002 + ps1 = "\001\033[0;32m\002>>> \001\033[0m\002" + prompt, l = Reader.process_prompt(ps1) + self.assertEqual(prompt, "\033[0;32m>>> \033[0m") + self.assertEqual(l, 4) + + # Handles wide characters in prompt + ps1 = "樂>> " + prompt, l = Reader.process_prompt(ps1) + self.assertEqual(prompt, ps1) + self.assertEqual(l, 5) + + # Handles wide characters AND ANSI sequences together + ps1 = "\001\033[0;32m\002樂>\001\033[0m\002> " + prompt, l = Reader.process_prompt(ps1) + self.assertEqual(prompt, "\033[0;32m樂>\033[0m> ") + self.assertEqual(l, 5) + + def test_completions_updated_on_key_press(self): + namespace = {"itertools": itertools} + code = "itertools." 
+ events = itertools.chain(code_to_events(code), [ + Event(evt='key', data='\t', raw=bytearray(b'\t')), # Two tabs for completion + Event(evt='key', data='\t', raw=bytearray(b'\t')), + ], code_to_events("a")) + + completing_reader = functools.partial( + prepare_reader, + readline_completer=rlcompleter.Completer(namespace).complete + ) + reader, _ = handle_all_events(events, prepare_reader=completing_reader) + + actual = reader.screen + self.assertEqual(len(actual), 2) + self.assertEqual(actual[0].rstrip(), "itertools.accumulate(") + self.assertEqual(actual[1], f"{code}a") + + def test_key_press_on_tab_press_once(self): + namespace = {"itertools": itertools} + code = "itertools." + events = itertools.chain(code_to_events(code), [ + Event(evt='key', data='\t', raw=bytearray(b'\t')), + ], code_to_events("a")) + + completing_reader = functools.partial( + prepare_reader, + readline_completer=rlcompleter.Completer(namespace).complete + ) + reader, _ = handle_all_events(events, prepare_reader=completing_reader) + + self.assert_screen_equals(reader, f"{code}a") diff --git a/Lib/test/test_pyrepl/test_unix_console.py b/Lib/test/test_pyrepl/test_unix_console.py new file mode 100644 index 00000000000000..e3bbabcb0089fb --- /dev/null +++ b/Lib/test/test_pyrepl/test_unix_console.py @@ -0,0 +1,314 @@ +import itertools +import sys +import unittest +from functools import partial +from unittest import TestCase +from unittest.mock import MagicMock, call, patch, ANY + +from .support import handle_all_events, code_to_events + +try: + from _pyrepl.console import Event + from _pyrepl.unix_console import UnixConsole +except ImportError: + pass + + +def unix_console(events, **kwargs): + console = UnixConsole() + console.get_event = MagicMock(side_effect=events) + + height = kwargs.get("height", 25) + width = kwargs.get("width", 80) + console.getheightwidth = MagicMock(side_effect=lambda: (height, width)) + + console.prepare() + for key, val in kwargs.items(): + setattr(console, key, val) + return console + + +handle_events_unix_console = partial( + handle_all_events, + prepare_console=partial(unix_console), +) +handle_events_narrow_unix_console = partial( + handle_all_events, + prepare_console=partial(unix_console, width=5), +) +handle_events_short_unix_console = partial( + handle_all_events, + prepare_console=partial(unix_console, height=1), +) +handle_events_unix_console_height_3 = partial( + handle_all_events, prepare_console=partial(unix_console, height=3) +) + + +TERM_CAPABILITIES = { + "bel": b"\x07", + "civis": b"\x1b[?25l", + "clear": b"\x1b[H\x1b[2J", + "cnorm": b"\x1b[?12l\x1b[?25h", + "cub": b"\x1b[%p1%dD", + "cub1": b"\x08", + "cud": b"\x1b[%p1%dB", + "cud1": b"\n", + "cuf": b"\x1b[%p1%dC", + "cuf1": b"\x1b[C", + "cup": b"\x1b[%i%p1%d;%p2%dH", + "cuu": b"\x1b[%p1%dA", + "cuu1": b"\x1b[A", + "dch1": b"\x1b[P", + "dch": b"\x1b[%p1%dP", + "el": b"\x1b[K", + "hpa": b"\x1b[%i%p1%dG", + "ich": b"\x1b[%p1%d@", + "ich1": None, + "ind": b"\n", + "pad": None, + "ri": b"\x1bM", + "rmkx": b"\x1b[?1l\x1b>", + "smkx": b"\x1b[?1h\x1b=", +} + + +@unittest.skipIf(sys.platform == "win32", "No Unix event queue on Windows") +@patch("_pyrepl.curses.tigetstr", lambda s: TERM_CAPABILITIES.get(s)) +@patch( + "_pyrepl.curses.tparm", + lambda s, *args: s + b":" + b",".join(str(i).encode() for i in args), +) +@patch("_pyrepl.curses.setupterm", lambda a, b: None) +@patch( + "termios.tcgetattr", + lambda _: [ + 27394, + 3, + 19200, + 536872399, + 38400, + 38400, + [ + b"\x04", + b"\xff", + b"\xff", + b"\x7f", + b"\x17", + b"\x15", 
+ b"\x12", + b"\x00", + b"\x03", + b"\x1c", + b"\x1a", + b"\x19", + b"\x11", + b"\x13", + b"\x16", + b"\x0f", + b"\x01", + b"\x00", + b"\x14", + b"\x00", + ], + ], +) +@patch("termios.tcsetattr", lambda a, b, c: None) +@patch("os.write") +class TestConsole(TestCase): + def test_simple_addition(self, _os_write): + code = "12+34" + events = code_to_events(code) + _, con = handle_events_unix_console(events) + _os_write.assert_any_call(ANY, b"1") + _os_write.assert_any_call(ANY, b"2") + _os_write.assert_any_call(ANY, b"+") + _os_write.assert_any_call(ANY, b"3") + _os_write.assert_any_call(ANY, b"4") + con.restore() + + def test_wrap(self, _os_write): + code = "12+34" + events = code_to_events(code) + _, con = handle_events_narrow_unix_console(events) + _os_write.assert_any_call(ANY, b"1") + _os_write.assert_any_call(ANY, b"2") + _os_write.assert_any_call(ANY, b"+") + _os_write.assert_any_call(ANY, b"3") + _os_write.assert_any_call(ANY, b"\\") + _os_write.assert_any_call(ANY, b"\n") + _os_write.assert_any_call(ANY, b"4") + con.restore() + + def test_cursor_left(self, _os_write): + code = "1" + events = itertools.chain( + code_to_events(code), + [Event(evt="key", data="left", raw=bytearray(b"\x1bOD"))], + ) + _, con = handle_events_unix_console(events) + _os_write.assert_any_call(ANY, TERM_CAPABILITIES["cub"] + b":1") + con.restore() + + def test_cursor_left_right(self, _os_write): + code = "1" + events = itertools.chain( + code_to_events(code), + [ + Event(evt="key", data="left", raw=bytearray(b"\x1bOD")), + Event(evt="key", data="right", raw=bytearray(b"\x1bOC")), + ], + ) + _, con = handle_events_unix_console(events) + _os_write.assert_any_call(ANY, TERM_CAPABILITIES["cub"] + b":1") + _os_write.assert_any_call(ANY, TERM_CAPABILITIES["cuf"] + b":1") + con.restore() + + def test_cursor_up(self, _os_write): + code = "1\n2+3" + events = itertools.chain( + code_to_events(code), + [Event(evt="key", data="up", raw=bytearray(b"\x1bOA"))], + ) + _, con = handle_events_unix_console(events) + _os_write.assert_any_call(ANY, TERM_CAPABILITIES["cuu"] + b":1") + con.restore() + + def test_cursor_up_down(self, _os_write): + code = "1\n2+3" + events = itertools.chain( + code_to_events(code), + [ + Event(evt="key", data="up", raw=bytearray(b"\x1bOA")), + Event(evt="key", data="down", raw=bytearray(b"\x1bOB")), + ], + ) + _, con = handle_events_unix_console(events) + _os_write.assert_any_call(ANY, TERM_CAPABILITIES["cuu"] + b":1") + _os_write.assert_any_call(ANY, TERM_CAPABILITIES["cud"] + b":1") + con.restore() + + def test_cursor_back_write(self, _os_write): + events = itertools.chain( + code_to_events("1"), + [Event(evt="key", data="left", raw=bytearray(b"\x1bOD"))], + code_to_events("2"), + ) + _, con = handle_events_unix_console(events) + _os_write.assert_any_call(ANY, b"1") + _os_write.assert_any_call(ANY, TERM_CAPABILITIES["cub"] + b":1") + _os_write.assert_any_call(ANY, b"2") + con.restore() + + def test_multiline_function_move_up_short_terminal(self, _os_write): + # fmt: off + code = ( + "def f():\n" + " foo" + ) + # fmt: on + + events = itertools.chain( + code_to_events(code), + [ + Event(evt="key", data="up", raw=bytearray(b"\x1bOA")), + Event(evt="scroll", data=None), + ], + ) + _, con = handle_events_short_unix_console(events) + _os_write.assert_any_call(ANY, TERM_CAPABILITIES["ri"] + b":") + con.restore() + + def test_multiline_function_move_up_down_short_terminal(self, _os_write): + # fmt: off + code = ( + "def f():\n" + " foo" + ) + # fmt: on + + events = itertools.chain( + code_to_events(code), 
+ [ + Event(evt="key", data="up", raw=bytearray(b"\x1bOA")), + Event(evt="scroll", data=None), + Event(evt="key", data="down", raw=bytearray(b"\x1bOB")), + Event(evt="scroll", data=None), + ], + ) + _, con = handle_events_short_unix_console(events) + _os_write.assert_any_call(ANY, TERM_CAPABILITIES["ri"] + b":") + _os_write.assert_any_call(ANY, TERM_CAPABILITIES["ind"] + b":") + con.restore() + + def test_resize_bigger_on_multiline_function(self, _os_write): + # fmt: off + code = ( + "def f():\n" + " foo" + ) + # fmt: on + + events = itertools.chain(code_to_events(code)) + reader, console = handle_events_short_unix_console(events) + + console.height = 2 + console.getheightwidth = MagicMock(lambda _: (2, 80)) + + def same_reader(_): + return reader + + def same_console(events): + console.get_event = MagicMock(side_effect=events) + return console + + _, con = handle_all_events( + [Event(evt="resize", data=None)], + prepare_reader=same_reader, + prepare_console=same_console, + ) + _os_write.assert_has_calls( + [ + call(ANY, TERM_CAPABILITIES["ri"] + b":"), + call(ANY, TERM_CAPABILITIES["cup"] + b":0,0"), + call(ANY, b"def f():"), + ] + ) + console.restore() + con.restore() + + def test_resize_smaller_on_multiline_function(self, _os_write): + # fmt: off + code = ( + "def f():\n" + " foo" + ) + # fmt: on + + events = itertools.chain(code_to_events(code)) + reader, console = handle_events_unix_console_height_3(events) + + console.height = 1 + console.getheightwidth = MagicMock(lambda _: (1, 80)) + + def same_reader(_): + return reader + + def same_console(events): + console.get_event = MagicMock(side_effect=events) + return console + + _, con = handle_all_events( + [Event(evt="resize", data=None)], + prepare_reader=same_reader, + prepare_console=same_console, + ) + _os_write.assert_has_calls( + [ + call(ANY, TERM_CAPABILITIES["ind"] + b":"), + call(ANY, TERM_CAPABILITIES["cup"] + b":0,0"), + call(ANY, b" foo"), + ] + ) + console.restore() + con.restore() diff --git a/Lib/test/test_pyrepl/test_unix_eventqueue.py b/Lib/test/test_pyrepl/test_unix_eventqueue.py new file mode 100644 index 00000000000000..301f79927a741f --- /dev/null +++ b/Lib/test/test_pyrepl/test_unix_eventqueue.py @@ -0,0 +1,117 @@ +import tempfile +import unittest +import sys +from unittest.mock import patch + +try: + from _pyrepl.console import Event + from _pyrepl.unix_eventqueue import EventQueue +except ImportError: + pass + +@unittest.skipIf(sys.platform == "win32", "No Unix event queue on Windows") +@patch("_pyrepl.curses.tigetstr", lambda x: b"") +class TestUnixEventQueue(unittest.TestCase): + def setUp(self): + self.file = tempfile.TemporaryFile() + + def tearDown(self) -> None: + self.file.close() + + def test_get(self): + eq = EventQueue(self.file.fileno(), "utf-8") + event = Event("key", "a", b"a") + eq.insert(event) + self.assertEqual(eq.get(), event) + + def test_empty(self): + eq = EventQueue(self.file.fileno(), "utf-8") + self.assertTrue(eq.empty()) + eq.insert(Event("key", "a", b"a")) + self.assertFalse(eq.empty()) + + def test_flush_buf(self): + eq = EventQueue(self.file.fileno(), "utf-8") + eq.buf.extend(b"test") + self.assertEqual(eq.flush_buf(), b"test") + self.assertEqual(eq.buf, bytearray()) + + def test_insert(self): + eq = EventQueue(self.file.fileno(), "utf-8") + event = Event("key", "a", b"a") + eq.insert(event) + self.assertEqual(eq.events[0], event) + + @patch("_pyrepl.unix_eventqueue.keymap") + def test_push_with_key_in_keymap(self, mock_keymap): + mock_keymap.compile_keymap.return_value = {"a": "b"} + 
eq = EventQueue(self.file.fileno(), "utf-8") + eq.keymap = {b"a": "b"} + eq.push("a") + mock_keymap.compile_keymap.assert_called() + self.assertEqual(eq.events[0].evt, "key") + self.assertEqual(eq.events[0].data, "b") + + @patch("_pyrepl.unix_eventqueue.keymap") + def test_push_without_key_in_keymap(self, mock_keymap): + mock_keymap.compile_keymap.return_value = {"a": "b"} + eq = EventQueue(self.file.fileno(), "utf-8") + eq.keymap = {b"c": "d"} + eq.push("a") + mock_keymap.compile_keymap.assert_called() + self.assertEqual(eq.events[0].evt, "key") + self.assertEqual(eq.events[0].data, "a") + + @patch("_pyrepl.unix_eventqueue.keymap") + def test_push_with_keymap_in_keymap(self, mock_keymap): + mock_keymap.compile_keymap.return_value = {"a": "b"} + eq = EventQueue(self.file.fileno(), "utf-8") + eq.keymap = {b"a": {b"b": "c"}} + eq.push("a") + mock_keymap.compile_keymap.assert_called() + self.assertTrue(eq.empty()) + eq.push("b") + self.assertEqual(eq.events[0].evt, "key") + self.assertEqual(eq.events[0].data, "c") + eq.push("d") + self.assertEqual(eq.events[1].evt, "key") + self.assertEqual(eq.events[1].data, "d") + + @patch("_pyrepl.unix_eventqueue.keymap") + def test_push_with_keymap_in_keymap_and_escape(self, mock_keymap): + mock_keymap.compile_keymap.return_value = {"a": "b"} + eq = EventQueue(self.file.fileno(), "utf-8") + eq.keymap = {b"a": {b"b": "c"}} + eq.push("a") + mock_keymap.compile_keymap.assert_called() + self.assertTrue(eq.empty()) + eq.flush_buf() + eq.push("\033") + self.assertEqual(eq.events[0].evt, "key") + self.assertEqual(eq.events[0].data, "\033") + eq.push("b") + self.assertEqual(eq.events[1].evt, "key") + self.assertEqual(eq.events[1].data, "b") + + def test_push_special_key(self): + eq = EventQueue(self.file.fileno(), "utf-8") + eq.keymap = {} + eq.push("\x1b") + eq.push("[") + eq.push("A") + self.assertEqual(eq.events[0].evt, "key") + self.assertEqual(eq.events[0].data, "\x1b") + + def test_push_unrecognized_escape_sequence(self): + eq = EventQueue(self.file.fileno(), "utf-8") + eq.keymap = {} + eq.push("\x1b") + eq.push("[") + eq.push("Z") + self.assertEqual(len(eq.events), 3) + self.assertEqual(eq.events[0].evt, "key") + self.assertEqual(eq.events[0].data, "\x1b") + self.assertEqual(eq.events[1].evt, "key") + self.assertEqual(eq.events[1].data, "[") + self.assertEqual(eq.events[2].evt, "key") + self.assertEqual(eq.events[2].data, "Z") diff --git a/Lib/test/test_pyrepl/test_windows_console.py b/Lib/test/test_pyrepl/test_windows_console.py new file mode 100644 index 00000000000000..4a3b2baf64a944 --- /dev/null +++ b/Lib/test/test_pyrepl/test_windows_console.py @@ -0,0 +1,334 @@ +import sys +import unittest + +if sys.platform != "win32": + raise unittest.SkipTest("test only relevant on win32") + + +import itertools +from functools import partial +from typing import Iterable +from unittest import TestCase +from unittest.mock import MagicMock, call + +from .support import handle_all_events, code_to_events + +try: + from _pyrepl.console import Event, Console + from _pyrepl.windows_console import ( + WindowsConsole, + MOVE_LEFT, + MOVE_RIGHT, + MOVE_UP, + MOVE_DOWN, + ERASE_IN_LINE, + ) +except ImportError: + pass + + +class WindowsConsoleTests(TestCase): + def console(self, events, **kwargs) -> Console: + console = WindowsConsole() + console.get_event = MagicMock(side_effect=events) + console._scroll = MagicMock() + console._hide_cursor = MagicMock() + console._show_cursor = MagicMock() + console._getscrollbacksize = MagicMock(42) + console.out = MagicMock() + + height 
= kwargs.get("height", 25) + width = kwargs.get("width", 80) + console.getheightwidth = MagicMock(side_effect=lambda: (height, width)) + + console.prepare() + for key, val in kwargs.items(): + setattr(console, key, val) + return console + + def handle_events(self, events: Iterable[Event], **kwargs): + return handle_all_events(events, partial(self.console, **kwargs)) + + def handle_events_narrow(self, events): + return self.handle_events(events, width=5) + + def handle_events_short(self, events): + return self.handle_events(events, height=1) + + def handle_events_height_3(self, events): + return self.handle_events(events, height=3) + + def test_simple_addition(self): + code = "12+34" + events = code_to_events(code) + _, con = self.handle_events(events) + con.out.write.assert_any_call(b"1") + con.out.write.assert_any_call(b"2") + con.out.write.assert_any_call(b"+") + con.out.write.assert_any_call(b"3") + con.out.write.assert_any_call(b"4") + con.restore() + + def test_wrap(self): + code = "12+34" + events = code_to_events(code) + _, con = self.handle_events_narrow(events) + con.out.write.assert_any_call(b"1") + con.out.write.assert_any_call(b"2") + con.out.write.assert_any_call(b"+") + con.out.write.assert_any_call(b"3") + con.out.write.assert_any_call(b"\\") + con.out.write.assert_any_call(b"\n") + con.out.write.assert_any_call(b"4") + con.restore() + + def test_resize_wider(self): + code = "1234567890" + events = code_to_events(code) + reader, console = self.handle_events_narrow(events) + + console.height = 20 + console.width = 80 + console.getheightwidth = MagicMock(lambda _: (20, 80)) + + def same_reader(_): + return reader + + def same_console(events): + console.get_event = MagicMock(side_effect=events) + return console + + _, con = handle_all_events( + [Event(evt="resize", data=None)], + prepare_reader=same_reader, + prepare_console=same_console, + ) + + con.out.write.assert_any_call(self.move_right(2)) + con.out.write.assert_any_call(self.move_up(2)) + con.out.write.assert_any_call(b"567890") + + con.restore() + + def test_resize_narrower(self): + code = "1234567890" + events = code_to_events(code) + reader, console = self.handle_events(events) + + console.height = 20 + console.width = 4 + console.getheightwidth = MagicMock(lambda _: (20, 4)) + + def same_reader(_): + return reader + + def same_console(events): + console.get_event = MagicMock(side_effect=events) + return console + + _, con = handle_all_events( + [Event(evt="resize", data=None)], + prepare_reader=same_reader, + prepare_console=same_console, + ) + + con.out.write.assert_any_call(b"456\\") + con.out.write.assert_any_call(b"789\\") + + con.restore() + + def test_cursor_left(self): + code = "1" + events = itertools.chain( + code_to_events(code), + [Event(evt="key", data="left", raw=bytearray(b"\x1bOD"))], + ) + _, con = self.handle_events(events) + con.out.write.assert_any_call(self.move_left()) + con.restore() + + def test_cursor_left_right(self): + code = "1" + events = itertools.chain( + code_to_events(code), + [ + Event(evt="key", data="left", raw=bytearray(b"\x1bOD")), + Event(evt="key", data="right", raw=bytearray(b"\x1bOC")), + ], + ) + _, con = self.handle_events(events) + con.out.write.assert_any_call(self.move_left()) + con.out.write.assert_any_call(self.move_right()) + con.restore() + + def test_cursor_up(self): + code = "1\n2+3" + events = itertools.chain( + code_to_events(code), + [Event(evt="key", data="up", raw=bytearray(b"\x1bOA"))], + ) + _, con = self.handle_events(events) + 
con.out.write.assert_any_call(self.move_up()) + con.restore() + + def test_cursor_up_down(self): + code = "1\n2+3" + events = itertools.chain( + code_to_events(code), + [ + Event(evt="key", data="up", raw=bytearray(b"\x1bOA")), + Event(evt="key", data="down", raw=bytearray(b"\x1bOB")), + ], + ) + _, con = self.handle_events(events) + con.out.write.assert_any_call(self.move_up()) + con.out.write.assert_any_call(self.move_down()) + con.restore() + + def test_cursor_back_write(self): + events = itertools.chain( + code_to_events("1"), + [Event(evt="key", data="left", raw=bytearray(b"\x1bOD"))], + code_to_events("2"), + ) + _, con = self.handle_events(events) + con.out.write.assert_any_call(b"1") + con.out.write.assert_any_call(self.move_left()) + con.out.write.assert_any_call(b"21") + con.restore() + + def test_multiline_function_move_up_short_terminal(self): + # fmt: off + code = ( + "def f():\n" + " foo" + ) + # fmt: on + + events = itertools.chain( + code_to_events(code), + [ + Event(evt="key", data="up", raw=bytearray(b"\x1bOA")), + Event(evt="scroll", data=None), + ], + ) + _, con = self.handle_events_short(events) + con.out.write.assert_any_call(self.move_left(5)) + con.out.write.assert_any_call(self.move_up()) + con.restore() + + def test_multiline_function_move_up_down_short_terminal(self): + # fmt: off + code = ( + "def f():\n" + " foo" + ) + # fmt: on + + events = itertools.chain( + code_to_events(code), + [ + Event(evt="key", data="up", raw=bytearray(b"\x1bOA")), + Event(evt="scroll", data=None), + Event(evt="key", data="down", raw=bytearray(b"\x1bOB")), + Event(evt="scroll", data=None), + ], + ) + _, con = self.handle_events_short(events) + con.out.write.assert_any_call(self.move_left(8)) + con.out.write.assert_any_call(self.erase_in_line()) + con.restore() + + def test_resize_bigger_on_multiline_function(self): + # fmt: off + code = ( + "def f():\n" + " foo" + ) + # fmt: on + + events = itertools.chain(code_to_events(code)) + reader, console = self.handle_events_short(events) + + console.height = 2 + console.getheightwidth = MagicMock(lambda _: (2, 80)) + + def same_reader(_): + return reader + + def same_console(events): + console.get_event = MagicMock(side_effect=events) + return console + + _, con = handle_all_events( + [Event(evt="resize", data=None)], + prepare_reader=same_reader, + prepare_console=same_console, + ) + con.out.write.assert_has_calls( + [ + call(self.move_left(5)), + call(self.move_up()), + call(b"def f():"), + call(self.move_left(3)), + call(self.move_down()), + ] + ) + console.restore() + con.restore() + + def test_resize_smaller_on_multiline_function(self): + # fmt: off + code = ( + "def f():\n" + " foo" + ) + # fmt: on + + events = itertools.chain(code_to_events(code)) + reader, console = self.handle_events_height_3(events) + + console.height = 1 + console.getheightwidth = MagicMock(lambda _: (1, 80)) + + def same_reader(_): + return reader + + def same_console(events): + console.get_event = MagicMock(side_effect=events) + return console + + _, con = handle_all_events( + [Event(evt="resize", data=None)], + prepare_reader=same_reader, + prepare_console=same_console, + ) + con.out.write.assert_has_calls( + [ + call(self.move_left(5)), + call(self.move_up()), + call(self.erase_in_line()), + call(b" foo"), + ] + ) + console.restore() + con.restore() + + def move_up(self, lines=1): + return MOVE_UP.format(lines).encode("utf8") + + def move_down(self, lines=1): + return MOVE_DOWN.format(lines).encode("utf8") + + def move_left(self, cols=1): + return 
MOVE_LEFT.format(cols).encode("utf8") + + def move_right(self, cols=1): + return MOVE_RIGHT.format(cols).encode("utf8") + + def erase_in_line(self): + return ERASE_IN_LINE.encode("utf8") + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_regrtest.py b/Lib/test/test_regrtest.py index 809abd7e92d65f..17eff617a56aa4 100644 --- a/Lib/test/test_regrtest.py +++ b/Lib/test/test_regrtest.py @@ -473,15 +473,6 @@ def test_verbose3_huntrleaks(self): self.assertEqual(regrtest.hunt_refleak.runs, 10) self.assertFalse(regrtest.output_on_failure) - def test_xml_huntrleaks(self): - args = ['-R', '3:12', '--junit-xml', 'output.xml'] - with support.captured_stderr(): - regrtest = self.create_regrtest(args) - self.assertIsNotNone(regrtest.hunt_refleak) - self.assertEqual(regrtest.hunt_refleak.warmups, 3) - self.assertEqual(regrtest.hunt_refleak.runs, 12) - self.assertIsNone(regrtest.junit_filename) - @dataclasses.dataclass(slots=True) class Rerun: diff --git a/Lib/test/test_repl.py b/Lib/test/test_repl.py index 457279a4db687d..340178366fc13a 100644 --- a/Lib/test/test_repl.py +++ b/Lib/test/test_repl.py @@ -7,7 +7,7 @@ from textwrap import dedent from test import support from test.support import cpython_only, has_subprocess_support, SuppressCrashReport -from test.support.script_helper import kill_python +from test.support.script_helper import kill_python, assert_python_ok from test.support.import_helper import import_module @@ -195,6 +195,9 @@ def bar(x): expected = "(30, None, [\'def foo(x):\\n\', \' return x + 1\\n\', \'\\n\'], \'\')" self.assertIn(expected, output, expected) + def test_asyncio_repl_is_ok(self): + assert_python_ok("-m", "asyncio") + class TestInteractiveModeSyntaxErrors(unittest.TestCase): diff --git a/Lib/test/test_runpy.py b/Lib/test/test_runpy.py index 9d76764c75be3e..b64383f6546f31 100644 --- a/Lib/test/test_runpy.py +++ b/Lib/test/test_runpy.py @@ -15,7 +15,7 @@ from test.support import (infinite_recursion, no_tracing, verbose, requires_subprocess, requires_resource) from test.support.import_helper import forget, make_legacy_pyc, unload -from test.support.os_helper import create_empty_file, temp_dir +from test.support.os_helper import create_empty_file, temp_dir, FakePath from test.support.script_helper import make_script, make_zip_script @@ -657,14 +657,13 @@ def test_basic_script(self): self._check_script(script_name, "", script_name, script_name, expect_spec=False) - def test_basic_script_with_path_object(self): + def test_basic_script_with_pathlike_object(self): with temp_dir() as script_dir: mod_name = 'script' - script_name = pathlib.Path(self._make_test_script(script_dir, - mod_name)) - self._check_script(script_name, "", - os.fsdecode(script_name), - os.fsdecode(script_name), + script_name = self._make_test_script(script_dir, mod_name) + self._check_script(FakePath(script_name), "", + script_name, + script_name, expect_spec=False) def test_basic_script_no_suffix(self): diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py index 5b0aac67a0adeb..bccb81e0737c57 100644 --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -741,6 +741,16 @@ def _onexc(fn, path, exc): shutil.rmtree(TESTFN) raise + def test_rmtree_above_recursion_limit(self): + recursion_limit = 40 + # directory_depth > recursion_limit + directory_depth = recursion_limit + 10 + base = os.path.join(TESTFN, *(['d'] * directory_depth)) + os.makedirs(base) + + with support.infinite_recursion(recursion_limit): + shutil.rmtree(TESTFN) + class TestCopyTree(BaseTest, 
unittest.TestCase): @@ -914,7 +924,7 @@ def _ignore(src, names): 'test.txt'))) dst_dir = join(self.mkdtemp(), 'destination') - shutil.copytree(pathlib.Path(src_dir), dst_dir, ignore=_ignore) + shutil.copytree(FakePath(src_dir), dst_dir, ignore=_ignore) self.assertTrue(exists(join(dst_dir, 'test_dir', 'subdir', 'test.txt'))) @@ -2107,7 +2117,7 @@ def check_unpack_archive(self, format, **kwargs): self.check_unpack_archive_with_converter( format, lambda path: path, **kwargs) self.check_unpack_archive_with_converter( - format, pathlib.Path, **kwargs) + format, FakePath, **kwargs) self.check_unpack_archive_with_converter(format, FakePath, **kwargs) def check_unpack_archive_with_converter(self, format, converter, **kwargs): @@ -2672,12 +2682,12 @@ def test_move_file_to_dir(self): def test_move_file_to_dir_pathlike_src(self): # Move a pathlike file to another location on the same filesystem. - src = pathlib.Path(self.src_file) + src = FakePath(self.src_file) self._check_move_file(src, self.dst_dir, self.dst_file) def test_move_file_to_dir_pathlike_dst(self): # Move a file to another pathlike location on the same filesystem. - dst = pathlib.Path(self.dst_dir) + dst = FakePath(self.dst_dir) self._check_move_file(self.src_file, dst, self.dst_file) @mock_rename diff --git a/Lib/test/test_signal.py b/Lib/test/test_signal.py index 61fb047caf6dab..591cd4177d9f41 100644 --- a/Lib/test/test_signal.py +++ b/Lib/test/test_signal.py @@ -698,7 +698,7 @@ def handler(signum, frame): @unittest.skipUnless(hasattr(os, "pipe"), "requires os.pipe()") class SiginterruptTest(unittest.TestCase): - def readpipe_interrupted(self, interrupt): + def readpipe_interrupted(self, interrupt, timeout=support.SHORT_TIMEOUT): """Perform a read during which a signal will arrive. Return True if the read is interrupted by the signal and raises an exception. Return False if it returns normally. @@ -746,7 +746,7 @@ def handler(signum, frame): # wait until the child process is loaded and has started first_line = process.stdout.readline() - stdout, stderr = process.communicate(timeout=support.SHORT_TIMEOUT) + stdout, stderr = process.communicate(timeout=timeout) except subprocess.TimeoutExpired: process.kill() return False @@ -777,7 +777,7 @@ def test_siginterrupt_off(self): # If a signal handler is installed and siginterrupt is called with # a false value for the second argument, when that signal arrives, it # does not interrupt a syscall that's in progress. 
- interrupted = self.readpipe_interrupted(False) + interrupted = self.readpipe_interrupted(False, timeout=2) self.assertFalse(interrupted) diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py index 0c4b3bb2ad4d81..680c104db705b1 100644 --- a/Lib/test/test_socket.py +++ b/Lib/test/test_socket.py @@ -47,6 +47,7 @@ # test unicode string and carriage return MSG = 'Michael Gilfix was here\u1234\r\n'.encode('utf-8') +VMADDR_CID_LOCAL = 1 VSOCKPORT = 1234 AIX = platform.system() == "AIX" WSL = "microsoft-standard-WSL" in platform.release() @@ -160,8 +161,8 @@ def _have_socket_qipcrtr(): def _have_socket_vsock(): """Check whether AF_VSOCK sockets are supported on this host.""" - ret = get_cid() is not None - return ret + cid = get_cid() + return (cid is not None) def _have_socket_bluetooth(): @@ -520,8 +521,6 @@ def clientTearDown(self): @unittest.skipIf(WSL, 'VSOCK does not work on Microsoft WSL') @unittest.skipUnless(HAVE_SOCKET_VSOCK, 'VSOCK sockets required for this test.') -@unittest.skipUnless(get_cid() != 2, - "This test can only be run on a virtual guest.") class ThreadedVSOCKSocketStreamTest(unittest.TestCase, ThreadableTest): def __init__(self, methodName='runTest'): @@ -543,6 +542,9 @@ def clientSetUp(self): self.cli = socket.socket(socket.AF_VSOCK, socket.SOCK_STREAM) self.addCleanup(self.cli.close) cid = get_cid() + if cid in (socket.VMADDR_CID_HOST, socket.VMADDR_CID_ANY): + # gh-119461: Use the local communication address (loopback) + cid = VMADDR_CID_LOCAL self.cli.connect((cid, VSOCKPORT)) def testStream(self): diff --git a/Lib/test/test_statistics.py b/Lib/test/test_statistics.py index 40680759d456ac..6f68edd447c953 100644 --- a/Lib/test/test_statistics.py +++ b/Lib/test/test_statistics.py @@ -2446,6 +2446,7 @@ def test_kde_kernel_invcdfs(self): for x in xarr: self.assertAlmostEqual(invcdf(cdf(x)), x, places=5) + @support.requires_resource('cpu') def test_kde_random(self): kde_random = statistics.kde_random StatisticsError = statistics.StatisticsError diff --git a/Lib/test/test_strptime.py b/Lib/test/test_strptime.py index 05c8afc907ad3c..038746e26c24ad 100644 --- a/Lib/test/test_strptime.py +++ b/Lib/test/test_strptime.py @@ -7,7 +7,7 @@ import os import sys from test import support -from test.support import skip_if_buggy_ucrt_strfptime +from test.support import skip_if_buggy_ucrt_strfptime, warnings_helper from datetime import date as datetime_date import _strptime @@ -120,7 +120,7 @@ def setUp(self): def test_pattern(self): # Test TimeRE.pattern - pattern_string = self.time_re.pattern(r"%a %A %d") + pattern_string = self.time_re.pattern(r"%a %A %d %Y") self.assertTrue(pattern_string.find(self.locale_time.a_weekday[2]) != -1, "did not find abbreviated weekday in pattern string '%s'" % pattern_string) @@ -160,10 +160,11 @@ def test_compile(self): found.group('b'))) for directive in ('a','A','b','B','c','d','G','H','I','j','m','M','p', 'S','u','U','V','w','W','x','X','y','Y','Z','%'): - compiled = self.time_re.compile("%" + directive) - found = compiled.match(time.strftime("%" + directive)) + fmt = "%d %Y" if directive == 'd' else "%" + directive + compiled = self.time_re.compile(fmt) + found = compiled.match(time.strftime(fmt)) self.assertTrue(found, "Matching failed on '%s' using '%s' regex" % - (time.strftime("%" + directive), + (time.strftime(fmt), compiled.pattern)) def test_blankpattern(self): @@ -290,8 +291,9 @@ def test_unconverteddata(self): def helper(self, directive, position): """Helper fxn in testing.""" - strf_output = time.strftime("%" + directive, 
self.time_tuple) - strp_output = _strptime._strptime_time(strf_output, "%" + directive) + fmt = "%d %Y" if directive == 'd' else "%" + directive + strf_output = time.strftime(fmt, self.time_tuple) + strp_output = _strptime._strptime_time(strf_output, fmt) self.assertTrue(strp_output[position] == self.time_tuple[position], "testing of '%s' directive failed; '%s' -> %s != %s" % (directive, strf_output, strp_output[position], @@ -497,9 +499,11 @@ def test_escaping(self): need_escaping = r".^$*+?{}\[]|)(" self.assertTrue(_strptime._strptime_time(need_escaping, need_escaping)) + @warnings_helper.ignore_warnings(category=DeprecationWarning) # gh-70647 def test_feb29_on_leap_year_without_year(self): time.strptime("Feb 29", "%b %d") + @warnings_helper.ignore_warnings(category=DeprecationWarning) # gh-70647 def test_mar1_comes_after_feb29_even_when_omitting_the_year(self): self.assertLess( time.strptime("Feb 29", "%b %d"), @@ -679,25 +683,25 @@ class CacheTests(unittest.TestCase): def test_time_re_recreation(self): # Make sure cache is recreated when current locale does not match what # cached object was created with. - _strptime._strptime_time("10", "%d") + _strptime._strptime_time("10 2004", "%d %Y") _strptime._strptime_time("2005", "%Y") _strptime._TimeRE_cache.locale_time.lang = "Ni" original_time_re = _strptime._TimeRE_cache - _strptime._strptime_time("10", "%d") + _strptime._strptime_time("10 2004", "%d %Y") self.assertIsNot(original_time_re, _strptime._TimeRE_cache) self.assertEqual(len(_strptime._regex_cache), 1) def test_regex_cleanup(self): # Make sure cached regexes are discarded when cache becomes "full". try: - del _strptime._regex_cache['%d'] + del _strptime._regex_cache['%d %Y'] except KeyError: pass bogus_key = 0 while len(_strptime._regex_cache) <= _strptime._CACHE_MAX_SIZE: _strptime._regex_cache[bogus_key] = None bogus_key += 1 - _strptime._strptime_time("10", "%d") + _strptime._strptime_time("10 2004", "%d %Y") self.assertEqual(len(_strptime._regex_cache), 1) def test_new_localetime(self): @@ -705,7 +709,7 @@ def test_new_localetime(self): # is created. locale_time_id = _strptime._TimeRE_cache.locale_time _strptime._TimeRE_cache.locale_time.lang = "Ni" - _strptime._strptime_time("10", "%d") + _strptime._strptime_time("10 2004", "%d %Y") self.assertIsNot(locale_time_id, _strptime._TimeRE_cache.locale_time) def test_TimeRE_recreation_locale(self): @@ -716,13 +720,13 @@ def test_TimeRE_recreation_locale(self): except locale.Error: self.skipTest('test needs en_US.UTF8 locale') try: - _strptime._strptime_time('10', '%d') + _strptime._strptime_time('10 2004', '%d %Y') # Get id of current cache object. first_time_re = _strptime._TimeRE_cache try: # Change the locale and force a recreation of the cache. locale.setlocale(locale.LC_TIME, ('de_DE', 'UTF8')) - _strptime._strptime_time('10', '%d') + _strptime._strptime_time('10 2004', '%d %Y') # Get the new cache object's id. second_time_re = _strptime._TimeRE_cache # They should not be equal. 
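
Context for the strptime changes above (a sketch, not part of the patch): the tests switch from "%d" to "%d %Y" because gh-70647 deprecates parsing a day of month without an accompanying year, which is also why the Feb 29 tests now silence DeprecationWarning. The snippet below assumes Python 3.13+ and that the warning category is DeprecationWarning, as the ignore_warnings decorators suggest; the exact warning text is not taken from this patch.

import time
import warnings

# An explicit year parses cleanly and emits no warning.
time.strptime("10 2004", "%d %Y")

# A day of month with no year now triggers a DeprecationWarning, because the
# result depends on an implicit default year (the "Feb 29" tests above are the
# motivating case).  Turn the warning into an error so it is visible here.
with warnings.catch_warnings():
    warnings.simplefilter("error", DeprecationWarning)
    try:
        time.strptime("10", "%d")
    except DeprecationWarning as exc:
        print("deprecated:", exc)
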
diff --git a/Lib/test/test_struct.py b/Lib/test/test_struct.py index 15f6ee06ffe19b..5508cc3eec85c8 100644 --- a/Lib/test/test_struct.py +++ b/Lib/test/test_struct.py @@ -9,7 +9,7 @@ import weakref from test import support -from test.support import import_helper +from test.support import import_helper, suppress_immortalization from test.support.script_helper import assert_python_ok ISBIGENDIAN = sys.byteorder == "big" @@ -674,6 +674,7 @@ def __del__(self): self.assertIn(b"Exception ignored in:", stderr) self.assertIn(b"C.__del__", stderr) + @suppress_immortalization() def test__struct_reference_cycle_cleaned_up(self): # Regression test for python/cpython#94207. diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py index 9ecd8426cb5537..8b69cd03ba7f24 100644 --- a/Lib/test/test_subprocess.py +++ b/Lib/test/test_subprocess.py @@ -25,7 +25,6 @@ import gc import textwrap import json -import pathlib from test.support.os_helper import FakePath try: @@ -1522,9 +1521,6 @@ def test_communicate_epipe(self): p.communicate(b"x" * 2**20) def test_repr(self): - path_cmd = pathlib.Path("my-tool.py") - pathlib_cls = path_cmd.__class__.__name__ - cases = [ ("ls", True, 123, ""), ('a' * 100, True, 0, @@ -1532,7 +1528,8 @@ def test_repr(self): (["ls"], False, None, ""), (["ls", '--my-opts', 'a' * 100], False, None, ""), - (path_cmd, False, 7, f"") + (os_helper.FakePath("my-tool.py"), False, 7, + ">") ] with unittest.mock.patch.object(subprocess.Popen, '_execute_child'): for cmd, shell, code, sx in cases: diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index b978838ea7003f..491f7fd7908e97 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -1853,28 +1853,6 @@ Traceback (most recent call last): SyntaxError: positional patterns follow keyword patterns -Non-matching 'elif'/'else' statements: - - >>> if a == b: - ... ... - ... elif a == c: - Traceback (most recent call last): - SyntaxError: 'elif' must match an if-statement here - - >>> if x == y: - ... ... - ... else: - Traceback (most recent call last): - SyntaxError: 'else' must match a valid statement here - - >>> elif m == n: - Traceback (most recent call last): - SyntaxError: 'elif' must match an if-statement here - - >>> else: - Traceback (most recent call last): - SyntaxError: 'else' must match a valid statement here - Uses of the star operator which should fail: A[:*b] @@ -2167,8 +2145,8 @@ def _check_error(self, code, errtext, lineno=None, offset=None, end_lineno=None, end_offset=None): """Check that compiling code raises SyntaxError with errtext. - errtext is a regular expression that must be present in the - test of the exception raised. If subclass is specified, it + errtest is a regular expression that must be present in the + test of the exception raised. If subclass is specified it is the expected subclass of SyntaxError (e.g. IndentationError). """ try: @@ -2192,22 +2170,6 @@ def _check_error(self, code, errtext, else: self.fail("compile() did not raise SyntaxError") - def _check_noerror(self, code, - errtext="compile() raised unexpected SyntaxError", - filename="", mode="exec", subclass=None): - """Check that compiling code does not raise a SyntaxError. - - errtext is the message passed to self.fail if there is - a SyntaxError. If the subclass parameter is specified, - it is the subclass of SyntaxError (e.g. IndentationError) - that the raised error is checked against. 
- """ - try: - compile(code, filename, mode) - except SyntaxError as err: - if (not subclass) or isinstance(err, subclass): - self.fail(errtext) - def test_expression_with_assignment(self): self._check_error( "print(end1 + end2 = ' ')", @@ -2609,25 +2571,6 @@ def test_syntax_error_on_deeply_nested_blocks(self): """ self._check_error(source, "too many statically nested blocks") - def test_syntax_error_non_matching_elif_else_statements(self): - # Check bpo-45759: 'elif' statements that doesn't match an - # if-statement or 'else' statements that doesn't match any - # valid else-able statement (e.g. 'while') - self._check_error( - "elif m == n:\n ...", - "'elif' must match an if-statement here") - self._check_error( - "else:\n ...", - "'else' must match a valid statement here") - self._check_noerror("if a == b:\n ...\nelif a == c:\n ...") - self._check_noerror("if x == y:\n ...\nelse:\n ...") - self._check_error( - "else = 123", - "invalid syntax") - self._check_error( - "elif 55 = 123", - "cannot assign to literal here") - @support.cpython_only def test_error_on_parser_stack_overflow(self): source = "-" * 100000 + "4" diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py index ee3bd0092f9bf3..ed398060f791c9 100644 --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -394,10 +394,15 @@ def test_dlopenflags(self): @test.support.refcount_test def test_refcount(self): - # n here must be a global in order for this test to pass while - # tracing with a python function. Tracing calls PyFrame_FastToLocals - # which will add a copy of any locals to the frame object, causing - # the reference count to increase by 2 instead of 1. + # n here originally had to be a global in order for this test to pass + # while tracing with a python function. Tracing used to call + # PyFrame_FastToLocals, which would add a copy of any locals to the + # frame object, causing the ref count to increase by 2 instead of 1. + # While that no longer happens (due to PEP 667), this test case retains + # its original global-based implementation + # PEP 683's immortal objects also made this point moot, since the + # refcount for None doesn't change anyway. Maybe this test should be + # using a different constant value? (e.g. 
an integer) global n self.assertRaises(TypeError, sys.getrefcount) c = sys.getrefcount(None) diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py index c9c1097963a885..f715940de1d584 100644 --- a/Lib/test/test_tarfile.py +++ b/Lib/test/test_tarfile.py @@ -386,7 +386,7 @@ def test_is_tarfile_erroneous(self): self.assertFalse(tarfile.is_tarfile(tmpname)) # is_tarfile works on path-like objects - self.assertFalse(tarfile.is_tarfile(pathlib.Path(tmpname))) + self.assertFalse(tarfile.is_tarfile(os_helper.FakePath(tmpname))) # is_tarfile works on file objects with open(tmpname, "rb") as fobj: @@ -400,7 +400,7 @@ def test_is_tarfile_valid(self): self.assertTrue(tarfile.is_tarfile(self.tarname)) # is_tarfile works on path-like objects - self.assertTrue(tarfile.is_tarfile(pathlib.Path(self.tarname))) + self.assertTrue(tarfile.is_tarfile(os_helper.FakePath(self.tarname))) # is_tarfile works on file objects with open(self.tarname, "rb") as fobj: @@ -576,21 +576,23 @@ def test_bytes_name_attribute(self): self.assertIsInstance(tar.name, bytes) self.assertEqual(tar.name, os.path.abspath(fobj.name)) - def test_pathlike_name(self): - tarname = pathlib.Path(self.tarname) + def test_pathlike_name(self, tarname=None): + if tarname is None: + tarname = self.tarname + expected = os.path.abspath(tarname) + tarname = os_helper.FakePath(tarname) with tarfile.open(tarname, mode=self.mode) as tar: - self.assertIsInstance(tar.name, str) - self.assertEqual(tar.name, os.path.abspath(os.fspath(tarname))) + self.assertEqual(tar.name, expected) with self.taropen(tarname) as tar: - self.assertIsInstance(tar.name, str) - self.assertEqual(tar.name, os.path.abspath(os.fspath(tarname))) + self.assertEqual(tar.name, expected) with tarfile.TarFile.open(tarname, mode=self.mode) as tar: - self.assertIsInstance(tar.name, str) - self.assertEqual(tar.name, os.path.abspath(os.fspath(tarname))) + self.assertEqual(tar.name, expected) if self.suffix == '': with tarfile.TarFile(tarname, mode='r') as tar: - self.assertIsInstance(tar.name, str) - self.assertEqual(tar.name, os.path.abspath(os.fspath(tarname))) + self.assertEqual(tar.name, expected) + + def test_pathlike_bytes_name(self): + self.test_pathlike_name(os.fsencode(self.tarname)) def test_illegal_mode_arg(self): with open(tmpname, 'wb'): @@ -761,24 +763,24 @@ def test_deprecation_if_no_filter_passed_to_extract(self): # check that the stacklevel of the deprecation warning is correct: self.assertEqual(cm.filename, __file__) - def test_extractall_pathlike_name(self): - DIR = pathlib.Path(TEMPDIR) / "extractall" + def test_extractall_pathlike_dir(self): + DIR = os.path.join(TEMPDIR, "extractall") with os_helper.temp_dir(DIR), \ tarfile.open(tarname, encoding="iso8859-1") as tar: directories = [t for t in tar if t.isdir()] - tar.extractall(DIR, directories, filter='fully_trusted') + tar.extractall(os_helper.FakePath(DIR), directories, filter='fully_trusted') for tarinfo in directories: - path = DIR / tarinfo.name + path = os.path.join(DIR, tarinfo.name) self.assertEqual(os.path.getmtime(path), tarinfo.mtime) - def test_extract_pathlike_name(self): + def test_extract_pathlike_dir(self): dirtype = "ustar/dirtype" - DIR = pathlib.Path(TEMPDIR) / "extractall" + DIR = os.path.join(TEMPDIR, "extractall") with os_helper.temp_dir(DIR), \ tarfile.open(tarname, encoding="iso8859-1") as tar: tarinfo = tar.getmember(dirtype) - tar.extract(tarinfo, path=DIR, filter='fully_trusted') - extracted = DIR / dirtype + tar.extract(tarinfo, path=os_helper.FakePath(DIR), filter='fully_trusted') + 
extracted = os.path.join(DIR, dirtype) self.assertEqual(os.path.getmtime(extracted), tarinfo.mtime) def test_init_close_fobj(self): @@ -1390,11 +1392,11 @@ def test_ordered_recursion(self): def test_gettarinfo_pathlike_name(self): with tarfile.open(tmpname, self.mode) as tar: - path = pathlib.Path(TEMPDIR) / "file" + path = os.path.join(TEMPDIR, "file") with open(path, "wb") as fobj: fobj.write(b"aaa") - tarinfo = tar.gettarinfo(path) - tarinfo2 = tar.gettarinfo(os.fspath(path)) + tarinfo = tar.gettarinfo(os_helper.FakePath(path)) + tarinfo2 = tar.gettarinfo(path) self.assertIsInstance(tarinfo.name, str) self.assertEqual(tarinfo.name, tarinfo2.name) self.assertEqual(tarinfo.size, 3) @@ -1947,10 +1949,10 @@ def test_create_existing_taropen(self): self.assertIn("spameggs42", names[0]) def test_create_pathlike_name(self): - with tarfile.open(pathlib.Path(tmpname), self.mode) as tobj: + with tarfile.open(os_helper.FakePath(tmpname), self.mode) as tobj: self.assertIsInstance(tobj.name, str) self.assertEqual(tobj.name, os.path.abspath(tmpname)) - tobj.add(pathlib.Path(self.file_path)) + tobj.add(os_helper.FakePath(self.file_path)) names = tobj.getnames() self.assertEqual(len(names), 1) self.assertIn('spameggs42', names[0]) @@ -1961,10 +1963,10 @@ def test_create_pathlike_name(self): self.assertIn('spameggs42', names[0]) def test_create_taropen_pathlike_name(self): - with self.taropen(pathlib.Path(tmpname), "x") as tobj: + with self.taropen(os_helper.FakePath(tmpname), "x") as tobj: self.assertIsInstance(tobj.name, str) self.assertEqual(tobj.name, os.path.abspath(tmpname)) - tobj.add(pathlib.Path(self.file_path)) + tobj.add(os_helper.FakePath(self.file_path)) names = tobj.getnames() self.assertEqual(len(names), 1) self.assertIn('spameggs42', names[0]) diff --git a/Lib/test/test_tempfile.py b/Lib/test/test_tempfile.py index 19ddeaa169bf93..a5e182cef23dc5 100644 --- a/Lib/test/test_tempfile.py +++ b/Lib/test/test_tempfile.py @@ -63,16 +63,10 @@ def test_infer_return_type_multiples_and_none(self): tempfile._infer_return_type(b'', None, '') def test_infer_return_type_pathlib(self): - self.assertIs(str, tempfile._infer_return_type(pathlib.Path('/'))) + self.assertIs(str, tempfile._infer_return_type(os_helper.FakePath('/'))) def test_infer_return_type_pathlike(self): - class Path: - def __init__(self, path): - self.path = path - - def __fspath__(self): - return self.path - + Path = os_helper.FakePath self.assertIs(str, tempfile._infer_return_type(Path('/'))) self.assertIs(bytes, tempfile._infer_return_type(Path(b'/'))) self.assertIs(str, tempfile._infer_return_type('', Path(''))) @@ -443,7 +437,7 @@ def test_choose_directory(self): dir = tempfile.mkdtemp() try: self.do_create(dir=dir).write(b"blat") - self.do_create(dir=pathlib.Path(dir)).write(b"blat") + self.do_create(dir=os_helper.FakePath(dir)).write(b"blat") finally: support.gc_collect() # For PyPy or other GCs. 
os.rmdir(dir) @@ -681,7 +675,7 @@ def test_choose_directory(self): dir = tempfile.mkdtemp() try: self.do_create(dir=dir) - self.do_create(dir=pathlib.Path(dir)) + self.do_create(dir=os_helper.FakePath(dir)) finally: os.rmdir(dir) @@ -782,7 +776,7 @@ def test_choose_directory(self): dir = tempfile.mkdtemp() try: os.rmdir(self.do_create(dir=dir)) - os.rmdir(self.do_create(dir=pathlib.Path(dir))) + os.rmdir(self.do_create(dir=os_helper.FakePath(dir))) finally: os.rmdir(dir) diff --git a/Lib/test/test_tkinter/test_images.py b/Lib/test/test_tkinter/test_images.py index b8e549e314d27d..38371fe00d6eb5 100644 --- a/Lib/test/test_tkinter/test_images.py +++ b/Lib/test/test_tkinter/test_images.py @@ -581,13 +581,15 @@ def test_write(self): image.write(filename, background='#ff0000') image4 = tkinter.PhotoImage('::img::test4', master=self.root, format='ppm', file=filename) - self.assertEqual(image4.get(0, 0), (255, 0, 0)) + self.assertEqual(image4.get(0, 0), (255, 0, 0) if self.wantobjects else '255 0 0') self.assertEqual(image4.get(4, 6), image.get(4, 6)) image.write(filename, grayscale=True) image5 = tkinter.PhotoImage('::img::test5', master=self.root, format='ppm', file=filename) c = image5.get(4, 6) + if not self.wantobjects: + c = c.split() self.assertTrue(c[0] == c[1] == c[2], c) def test_data(self): @@ -597,7 +599,10 @@ def test_data(self): self.assertIsInstance(data, tuple) for row in data: self.assertIsInstance(row, str) - self.assertEqual(data[6].split()[4], '#%02x%02x%02x' % image.get(4, 6)) + c = image.get(4, 6) + if not self.wantobjects: + c = tuple(map(int, c.split())) + self.assertEqual(data[6].split()[4], '#%02x%02x%02x' % c) data = image.data('ppm') image2 = tkinter.PhotoImage('::img::test2', master=self.root, @@ -622,13 +627,15 @@ def test_data(self): data = image.data('ppm', background='#ff0000') image4 = tkinter.PhotoImage('::img::test4', master=self.root, format='ppm', data=data) - self.assertEqual(image4.get(0, 0), (255, 0, 0)) + self.assertEqual(image4.get(0, 0), (255, 0, 0) if self.wantobjects else '255 0 0') self.assertEqual(image4.get(4, 6), image.get(4, 6)) data = image.data('ppm', grayscale=True) image5 = tkinter.PhotoImage('::img::test5', master=self.root, format='ppm', data=data) c = image5.get(4, 6) + if not self.wantobjects: + c = c.split() self.assertTrue(c[0] == c[1] == c[2], c) diff --git a/Lib/test/test_tkinter/test_misc.py b/Lib/test/test_tkinter/test_misc.py index 6dca2a3920e06a..d9ea642881a179 100644 --- a/Lib/test/test_tkinter/test_misc.py +++ b/Lib/test/test_tkinter/test_misc.py @@ -532,6 +532,284 @@ def test_wm_attribute(self): 1.0 if self.wantobjects else '1.0') +class EventTest(AbstractTkTest, unittest.TestCase): + + def test_focus(self): + f = tkinter.Frame(self.root, width=150, height=100) + f.pack() + self.root.wait_visibility() # needed on Windows + self.root.update_idletasks() + + events = [] + f.bind('', events.append) + + f.focus_force() + self.root.update() + self.assertEqual(len(events), 1, events) + e = events[0] + self.assertIs(e.type, tkinter.EventType.FocusIn) + self.assertIs(e.widget, f) + self.assertIsInstance(e.serial, int) + self.assertEqual(e.time, '??') + self.assertIs(e.send_event, False) + self.assertFalse(hasattr(e, 'focus')) + self.assertEqual(e.num, '??') + self.assertEqual(e.state, '??') + self.assertEqual(e.char, '??') + self.assertEqual(e.keycode, '??') + self.assertEqual(e.keysym, '??') + self.assertEqual(e.keysym_num, '??') + self.assertEqual(e.width, '??') + self.assertEqual(e.height, '??') + self.assertEqual(e.x, '??') + 
self.assertEqual(e.y, '??') + self.assertEqual(e.x_root, '??') + self.assertEqual(e.y_root, '??') + self.assertEqual(e.delta, 0) + self.assertEqual(repr(e), '') + + def test_configure(self): + f = tkinter.Frame(self.root, width=150, height=100) + f.pack() + self.root.wait_visibility() # needed on Windows + self.root.update_idletasks() + + events = [] + f.bind('', events.append) + + f.configure(height=120, borderwidth=10) + self.assertEqual(len(events), 1, events) + e = events[0] + self.assertIs(e.type, tkinter.EventType.Configure) + self.assertIs(e.widget, f) + self.assertIsInstance(e.serial, int) + self.assertEqual(e.time, '??') + self.assertIs(e.send_event, False) + self.assertFalse(hasattr(e, 'focus')) + self.assertEqual(e.num, '??') + self.assertEqual(e.state, '??') + self.assertEqual(e.char, '??') + self.assertEqual(e.keycode, '??') + self.assertEqual(e.keysym, '??') + self.assertEqual(e.keysym_num, '??') + self.assertEqual(e.width, 150) + self.assertEqual(e.height, 100) + self.assertEqual(e.x, 0) + self.assertEqual(e.y, 0) + self.assertEqual(e.x_root, '??') + self.assertEqual(e.y_root, '??') + self.assertEqual(e.delta, 0) + self.assertEqual(repr(e), '') + + def test_event_generate_key_press(self): + f = tkinter.Frame(self.root, width=150, height=100) + f.pack() + self.root.wait_visibility() # needed on Windows + self.root.update_idletasks() + + events = [] + f.bind('', events.append) + f.focus_force() + + f.event_generate('') + self.assertEqual(len(events), 1, events) + e = events[0] + self.assertIs(e.type, tkinter.EventType.KeyPress) + self.assertIs(e.widget, f) + self.assertIsInstance(e.serial, int) + self.assertEqual(e.time, 0) + self.assertIs(e.send_event, False) + self.assertFalse(hasattr(e, 'focus')) + self.assertEqual(e.num, '??') + self.assertIsInstance(e.state, int) + self.assertNotEqual(e.state, 0) + self.assertEqual(e.char, 'z') + self.assertIsInstance(e.keycode, int) + self.assertNotEqual(e.keycode, 0) + self.assertEqual(e.keysym, 'z') + self.assertEqual(e.keysym_num, ord('z')) + self.assertEqual(e.width, '??') + self.assertEqual(e.height, '??') + self.assertEqual(e.x, -1 - f.winfo_rootx()) + self.assertEqual(e.y, -1 - f.winfo_rooty()) + self.assertEqual(e.x_root, -1) + self.assertEqual(e.y_root, -1) + self.assertEqual(e.delta, 0) + self.assertEqual(repr(e), + f"") + + def test_event_generate_enter(self): + f = tkinter.Frame(self.root, width=150, height=100) + f.pack() + self.root.wait_visibility() # needed on Windows + self.root.update_idletasks() + + events = [] + f.bind('', events.append) + + f.event_generate('', x=100, y=50) + self.assertEqual(len(events), 1, events) + e = events[0] + self.assertIs(e.type, tkinter.EventType.Enter) + self.assertIs(e.widget, f) + self.assertIsInstance(e.serial, int) + self.assertEqual(e.time, 0) + self.assertIs(e.send_event, False) + self.assertIs(e.focus, False) + self.assertEqual(e.num, '??') + self.assertEqual(e.state, 0) + self.assertEqual(e.char, '??') + self.assertEqual(e.keycode, '??') + self.assertEqual(e.keysym, '??') + self.assertEqual(e.keysym_num, '??') + self.assertEqual(e.width, '??') + self.assertEqual(e.height, '??') + self.assertEqual(e.x, 100) + self.assertEqual(e.y, 50) + self.assertEqual(e.x_root, 100 + f.winfo_rootx()) + self.assertEqual(e.y_root, 50 + f.winfo_rooty()) + self.assertEqual(e.delta, 0) + self.assertEqual(repr(e), '') + + def test_event_generate_button_press(self): + f = tkinter.Frame(self.root, width=150, height=100) + f.pack() + self.root.wait_visibility() # needed on Windows + 
self.root.update_idletasks() + + events = [] + f.bind('', events.append) + f.focus_force() + + f.event_generate('', x=100, y=50) + self.assertEqual(len(events), 1, events) + e = events[0] + self.assertIs(e.type, tkinter.EventType.ButtonPress) + self.assertIs(e.widget, f) + self.assertIsInstance(e.serial, int) + self.assertEqual(e.time, 0) + self.assertIs(e.send_event, False) + self.assertFalse(hasattr(e, 'focus')) + self.assertEqual(e.num, 1) + self.assertEqual(e.state, 0) + self.assertEqual(e.char, '??') + self.assertEqual(e.keycode, '??') + self.assertEqual(e.keysym, '??') + self.assertEqual(e.keysym_num, '??') + self.assertEqual(e.width, '??') + self.assertEqual(e.height, '??') + self.assertEqual(e.x, 100) + self.assertEqual(e.y, 50) + self.assertEqual(e.x_root, f.winfo_rootx() + 100) + self.assertEqual(e.y_root, f.winfo_rooty() + 50) + self.assertEqual(e.delta, 0) + self.assertEqual(repr(e), '') + + def test_event_generate_motion(self): + f = tkinter.Frame(self.root, width=150, height=100) + f.pack() + self.root.wait_visibility() # needed on Windows + self.root.update_idletasks() + + events = [] + f.bind('', events.append) + f.focus_force() + + f.event_generate('', x=100, y=50) + self.assertEqual(len(events), 1, events) + e = events[0] + self.assertIs(e.type, tkinter.EventType.Motion) + self.assertIs(e.widget, f) + self.assertIsInstance(e.serial, int) + self.assertEqual(e.time, 0) + self.assertIs(e.send_event, False) + self.assertFalse(hasattr(e, 'focus')) + self.assertEqual(e.num, '??') + self.assertEqual(e.state, 0x100) + self.assertEqual(e.char, '??') + self.assertEqual(e.keycode, '??') + self.assertEqual(e.keysym, '??') + self.assertEqual(e.keysym_num, '??') + self.assertEqual(e.width, '??') + self.assertEqual(e.height, '??') + self.assertEqual(e.x, 100) + self.assertEqual(e.y, 50) + self.assertEqual(e.x_root, f.winfo_rootx() + 100) + self.assertEqual(e.y_root, f.winfo_rooty() + 50) + self.assertEqual(e.delta, 0) + self.assertEqual(repr(e), '') + + def test_event_generate_mouse_wheel(self): + f = tkinter.Frame(self.root, width=150, height=100) + f.pack() + self.root.wait_visibility() # needed on Windows + self.root.update_idletasks() + + events = [] + f.bind('', events.append) + f.focus_force() + + f.event_generate('', x=100, y=50, delta=-5) + self.assertEqual(len(events), 1, events) + e = events[0] + self.assertIs(e.type, tkinter.EventType.MouseWheel) + self.assertIs(e.widget, f) + self.assertIsInstance(e.serial, int) + self.assertIs(e.send_event, False) + self.assertFalse(hasattr(e, 'focus')) + self.assertEqual(e.time, 0) + self.assertEqual(e.num, '??') + self.assertEqual(e.state, 0) + self.assertEqual(e.char, '??') + self.assertEqual(e.keycode, '??') + self.assertEqual(e.keysym, '??') + self.assertEqual(e.keysym_num, '??') + self.assertEqual(e.width, '??') + self.assertEqual(e.height, '??') + self.assertEqual(e.x, 100) + self.assertEqual(e.y, 50) + self.assertEqual(e.x_root, f.winfo_rootx() + 100) + self.assertEqual(e.y_root, f.winfo_rooty() + 50) + self.assertEqual(e.delta, -5) + self.assertEqual(repr(e), '') + + def test_generate_event_virtual_event(self): + f = tkinter.Frame(self.root, width=150, height=100) + f.pack() + self.root.wait_visibility() # needed on Windows + self.root.update_idletasks() + + events = [] + f.bind('<>', events.append) + f.focus_force() + + f.event_generate('<>', x=50) + self.assertEqual(len(events), 1, events) + e = events[0] + self.assertIs(e.type, tkinter.EventType.VirtualEvent) + self.assertIs(e.widget, f) + self.assertIsInstance(e.serial, int) + 
self.assertEqual(e.time, 0) + self.assertIs(e.send_event, False) + self.assertFalse(hasattr(e, 'focus')) + self.assertEqual(e.num, '??') + self.assertEqual(e.state, 0) + self.assertEqual(e.char, '??') + self.assertEqual(e.keycode, '??') + self.assertEqual(e.keysym, '??') + self.assertEqual(e.keysym_num, '??') + self.assertEqual(e.width, '??') + self.assertEqual(e.height, '??') + self.assertEqual(e.x, 50) + self.assertEqual(e.y, 0) + self.assertEqual(e.x_root, f.winfo_rootx() + 50) + self.assertEqual(e.y_root, -1) + self.assertEqual(e.delta, 0) + self.assertEqual(repr(e), + f"") + + class BindTest(AbstractTkTest, unittest.TestCase): def setUp(self): diff --git a/Lib/test/test_tkinter/test_widgets.py b/Lib/test/test_tkinter/test_widgets.py index 85bf5ff7652b69..f5f2fd2ee37b84 100644 --- a/Lib/test/test_tkinter/test_widgets.py +++ b/Lib/test/test_tkinter/test_widgets.py @@ -660,7 +660,9 @@ def test_configure_tabs(self): widget = self.create() self.checkParam(widget, 'tabs', (10.2, 20.7, '1i', '2i')) self.checkParam(widget, 'tabs', '10.2 20.7 1i 2i', - expected=('10.2', '20.7', '1i', '2i')) + expected=(10.2, 20.7, '1i', '2i') + if get_tk_patchlevel(self.root) >= (8, 6, 14) + else ('10.2', '20.7', '1i', '2i')) self.checkParam(widget, 'tabs', '2c left 4c 6c center', expected=('2c', 'left', '4c', '6c', 'center')) self.checkInvalidParam(widget, 'tabs', 'spam', @@ -999,12 +1001,16 @@ def test_itemconfigure(self): widget.itemconfigure() with self.assertRaisesRegex(TclError, 'bad listbox index "red"'): widget.itemconfigure('red') + if get_tk_patchlevel(self.root) >= (8, 6, 14): + prefix = ('background', '', '', '') + else: + prefix = ('background', 'background', 'Background', '') self.assertEqual(widget.itemconfigure(0, 'background'), - ('background', 'background', 'Background', '', 'red')) + (*prefix, 'red')) self.assertEqual(widget.itemconfigure('end', 'background'), - ('background', 'background', 'Background', '', 'violet')) + (*prefix, 'violet')) self.assertEqual(widget.itemconfigure('@0,0', 'background'), - ('background', 'background', 'Background', '', 'red')) + (*prefix, 'red')) d = widget.itemconfigure(0) self.assertIsInstance(d, dict) diff --git a/Lib/test/test_trace.py b/Lib/test/test_trace.py index c1e289bcaff9e5..7ff3fe4091dfa4 100644 --- a/Lib/test/test_trace.py +++ b/Lib/test/test_trace.py @@ -1,11 +1,12 @@ import os from pickle import dump import sys -from test.support import captured_stdout, requires_resource +from test.support import captured_stdout, requires_resource, requires_gil_enabled from test.support.os_helper import (TESTFN, rmtree, unlink) from test.support.script_helper import assert_python_ok, assert_python_failure import textwrap import unittest +from types import FunctionType import trace from trace import Trace @@ -300,6 +301,7 @@ def test_loop_caller_importing(self): @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(), 'pre-existing trace function throws off measurements') + @requires_gil_enabled("gh-117783: immortalization of types affects traced method names") def test_inst_method_calling(self): obj = TracedClass(20) self.tracer.runfunc(obj.inst_method_calling, 1) @@ -333,6 +335,7 @@ def setUp(self): @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(), 'pre-existing trace function throws off measurements') + @requires_gil_enabled("gh-117783: immortalization of types affects traced method names") def test_loop_caller_importing(self): self.tracer.runfunc(traced_func_importing_caller, 1) @@ -559,5 +562,29 @@ def test_run_as_module(self): 
assert_python_failure('-m', 'trace', '-l', '--module', 'not_a_module_zzz') +class TestTrace(unittest.TestCase): + def setUp(self): + self.addCleanup(sys.settrace, sys.gettrace()) + self.tracer = Trace(count=0, trace=1) + self.filemod = my_file_and_modname() + + def test_no_source_file(self): + filename = "" + co = traced_func_linear.__code__ + co = co.replace(co_filename=filename) + f = FunctionType(co, globals()) + + with captured_stdout() as out: + self.tracer.runfunc(f, 2, 3) + + out = out.getvalue().splitlines() + firstlineno = get_firstlineno(f) + self.assertIn(f" --- modulename: {self.filemod[1]}, funcname: {f.__code__.co_name}", out[0]) + self.assertIn(f"{filename}({firstlineno + 1})", out[1]) + self.assertIn(f"{filename}({firstlineno + 2})", out[2]) + self.assertIn(f"{filename}({firstlineno + 3})", out[3]) + self.assertIn(f"{filename}({firstlineno + 4})", out[4]) + + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_traceback.py b/Lib/test/test_traceback.py index 5987ec382e6c85..5035de114b5e9d 100644 --- a/Lib/test/test_traceback.py +++ b/Lib/test/test_traceback.py @@ -543,7 +543,7 @@ def test_signatures(self): self.assertEqual( str(inspect.signature(traceback.format_exception_only)), - '(exc, /, value=, *, show_group=False)') + '(exc, /, value=, *, show_group=False, **kwargs)') class PurePythonExceptionFormattingMixin: diff --git a/Lib/test/test_ttk/test_widgets.py b/Lib/test/test_ttk/test_widgets.py index ca7402b276013d..2e0702da5448a8 100644 --- a/Lib/test/test_ttk/test_widgets.py +++ b/Lib/test/test_ttk/test_widgets.py @@ -27,13 +27,20 @@ def test_configure_class(self): def test_configure_padding(self): widget = self.create() - self.checkParam(widget, 'padding', 0, expected=('0',)) - self.checkParam(widget, 'padding', 5, expected=('5',)) - self.checkParam(widget, 'padding', (5, 6), expected=('5', '6')) + if get_tk_patchlevel(self.root) < (8, 6, 14): + def padding_conv(value): + self.assertIsInstance(value, tuple) + return tuple(map(str, value)) + else: + padding_conv = None + self.checkParam(widget, 'padding', 0, expected=(0,), conv=padding_conv) + self.checkParam(widget, 'padding', 5, expected=(5,), conv=padding_conv) + self.checkParam(widget, 'padding', (5, 6), + expected=(5, 6), conv=padding_conv) self.checkParam(widget, 'padding', (5, 6, 7), - expected=('5', '6', '7')) + expected=(5, 6, 7), conv=padding_conv) self.checkParam(widget, 'padding', (5, 6, 7, 8), - expected=('5', '6', '7', '8')) + expected=(5, 6, 7, 8), conv=padding_conv) self.checkParam(widget, 'padding', ('5p', '6p', '7p', '8p')) self.checkParam(widget, 'padding', (), expected='') diff --git a/Lib/test/test_type_aliases.py b/Lib/test/test_type_aliases.py index 9c325bc595f585..f8b395fdc8bb1d 100644 --- a/Lib/test/test_type_aliases.py +++ b/Lib/test/test_type_aliases.py @@ -1,4 +1,5 @@ import pickle +import textwrap import types import unittest from test.support import check_syntax_error, run_code @@ -328,3 +329,22 @@ def test_pickling_local(self): with self.subTest(thing=thing, proto=proto): with self.assertRaises(pickle.PickleError): pickle.dumps(thing, protocol=proto) + + +class TypeParamsExoticGlobalsTest(unittest.TestCase): + def test_exec_with_unusual_globals(self): + class customdict(dict): + def __missing__(self, key): + return key + + code = compile("type Alias = undefined", "test", "exec") + ns = customdict() + exec(code, ns) + Alias = ns["Alias"] + self.assertEqual(Alias.__value__, "undefined") + + code = compile("class A: type Alias = undefined", "test", "exec") + ns = customdict() 
+ exec(code, ns) + Alias = ns["A"].Alias + self.assertEqual(Alias.__value__, "undefined") diff --git a/Lib/test/test_type_params.py b/Lib/test/test_type_params.py index 82f1007f9ac97b..bf1a34b9fc82b3 100644 --- a/Lib/test/test_type_params.py +++ b/Lib/test/test_type_params.py @@ -563,6 +563,11 @@ class C[T, U]: self.assertIs(T, C.Alias.__type_params__[0]) self.assertIs(U, C.__type_params__[1]) + def test_type_special_case(self): + # https://github.com/python/cpython/issues/119011 + self.assertEqual(type.__type_params__, ()) + self.assertEqual(object.__type_params__, ()) + def make_base(arg): class Base: @@ -823,6 +828,100 @@ def meth[__U](self, arg: __T, arg2: __U): self.assertEqual(Foo.Alias.__value__, (T, V)) + def test_no_leaky_mangling_in_module(self): + ns = run_code(""" + __before = "before" + class X[T]: pass + __after = "after" + """) + self.assertEqual(ns["__before"], "before") + self.assertEqual(ns["__after"], "after") + + def test_no_leaky_mangling_in_function(self): + ns = run_code(""" + def f(): + class X[T]: pass + _X_foo = 2 + __foo = 1 + assert locals()['__foo'] == 1 + return __foo + """) + self.assertEqual(ns["f"](), 1) + + def test_no_leaky_mangling_in_class(self): + ns = run_code(""" + class Outer: + __before = "before" + class Inner[T]: + __x = "inner" + __after = "after" + """) + Outer = ns["Outer"] + self.assertEqual(Outer._Outer__before, "before") + self.assertEqual(Outer.Inner._Inner__x, "inner") + self.assertEqual(Outer._Outer__after, "after") + + def test_no_mangling_in_bases(self): + ns = run_code(""" + class __Base: + def __init_subclass__(self, **kwargs): + self.kwargs = kwargs + + class Derived[T](__Base, __kwarg=1): + pass + """) + Derived = ns["Derived"] + self.assertEqual(Derived.__bases__, (ns["__Base"], Generic)) + self.assertEqual(Derived.kwargs, {"__kwarg": 1}) + + def test_no_mangling_in_nested_scopes(self): + ns = run_code(""" + from test.test_type_params import make_base + + class __X: + pass + + class Y[T: __X]( + make_base(lambda: __X), + # doubly nested scope + make_base(lambda: (lambda: __X)), + # list comprehension + make_base([__X for _ in (1,)]), + # genexp + make_base(__X for _ in (1,)), + ): + pass + """) + Y = ns["Y"] + T, = Y.__type_params__ + self.assertIs(T.__bound__, ns["__X"]) + base0 = Y.__bases__[0] + self.assertIs(base0.__arg__(), ns["__X"]) + base1 = Y.__bases__[1] + self.assertIs(base1.__arg__()(), ns["__X"]) + base2 = Y.__bases__[2] + self.assertEqual(base2.__arg__, [ns["__X"]]) + base3 = Y.__bases__[3] + self.assertEqual(list(base3.__arg__), [ns["__X"]]) + + def test_type_params_are_mangled(self): + ns = run_code(""" + from test.test_type_params import make_base + + class Foo[__T, __U: __T](make_base(__T), make_base(lambda: __T)): + param = __T + """) + Foo = ns["Foo"] + T, U = Foo.__type_params__ + self.assertEqual(T.__name__, "__T") + self.assertEqual(U.__name__, "__U") + self.assertIs(U.__bound__, T) + self.assertIs(Foo.param, T) + + base1, base2, *_ = Foo.__bases__ + self.assertIs(base1.__arg__, T) + self.assertIs(base2.__arg__(), T) + class TypeParamsComplexCallsTest(unittest.TestCase): def test_defaults(self): diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py index bd116bb1ab7213..1ab7d35e2401c3 100644 --- a/Lib/test/test_typing.py +++ b/Lib/test/test_typing.py @@ -45,7 +45,7 @@ import weakref import types -from test.support import captured_stderr, cpython_only, infinite_recursion +from test.support import captured_stderr, cpython_only, infinite_recursion, requires_docstrings, import_helper from 
test.typinganndata import ann_module695, mod_generics_cache, _typed_dict_helper @@ -668,6 +668,23 @@ class X(Generic[*Ts, T]): ... with self.assertRaises(TypeError): class Y(Generic[*Ts_default, T]): ... + def test_allow_default_after_non_default_in_alias(self): + T_default = TypeVar('T_default', default=int) + T = TypeVar('T') + Ts = TypeVarTuple('Ts') + + a1 = Callable[[T_default], T] + self.assertEqual(a1.__args__, (T_default, T)) + + a2 = dict[T_default, T] + self.assertEqual(a2.__args__, (T_default, T)) + + a3 = typing.Dict[T_default, T] + self.assertEqual(a3.__args__, (T_default, T)) + + a4 = Callable[*Ts, T] + self.assertEqual(a4.__args__, (*Ts, T)) + def test_paramspec_specialization(self): T = TypeVar("T") P = ParamSpec('P', default=[str, int]) @@ -6308,6 +6325,8 @@ def test_or(self): self.assertEqual(X | "x", Union[X, "x"]) self.assertEqual("x" | X, Union["x", X]) + +class InternalsTests(BaseTestCase): def test_deprecation_for_no_type_params_passed_to__evaluate(self): with self.assertWarnsRegex( DeprecationWarning, @@ -6333,6 +6352,15 @@ def test_deprecation_for_no_type_params_passed_to__evaluate(self): self.assertEqual(cm.filename, __file__) + def test_collect_parameters(self): + typing = import_helper.import_fresh_module("typing") + with self.assertWarnsRegex( + DeprecationWarning, + "The private _collect_parameters function is deprecated" + ) as cm: + typing._collect_parameters + self.assertEqual(cm.filename, __file__) + @lru_cache() def cached_func(x, y): @@ -7032,24 +7060,16 @@ def test_iterator(self): self.assertNotIsInstance(42, typing.Iterator) def test_awaitable(self): - ns = {} - exec( - "async def foo() -> typing.Awaitable[int]:\n" - " return await AwaitableWrapper(42)\n", - globals(), ns) - foo = ns['foo'] + async def foo() -> typing.Awaitable[int]: + return await AwaitableWrapper(42) g = foo() self.assertIsInstance(g, typing.Awaitable) self.assertNotIsInstance(foo, typing.Awaitable) g.send(None) # Run foo() till completion, to avoid warning. 
def test_coroutine(self): - ns = {} - exec( - "async def foo():\n" - " return\n", - globals(), ns) - foo = ns['foo'] + async def foo(): + return g = foo() self.assertIsInstance(g, typing.Coroutine) with self.assertRaises(TypeError): @@ -7334,10 +7354,9 @@ def test_no_generator_instantiation(self): typing.Generator[int, int, int]() def test_async_generator(self): - ns = {} - exec("async def f():\n" - " yield 42\n", globals(), ns) - g = ns['f']() + async def f(): + yield 42 + g = f() self.assertIsSubclass(type(g), typing.AsyncGenerator) def test_no_async_generator_instantiation(self): @@ -7424,9 +7443,8 @@ def asend(self, value): def athrow(self, typ, val=None, tb=None): pass - ns = {} - exec('async def g(): yield 0', globals(), ns) - g = ns['g'] + async def g(): yield 0 + self.assertIsSubclass(G, typing.AsyncGenerator) self.assertIsSubclass(G, typing.AsyncIterable) self.assertIsSubclass(G, collections.abc.AsyncGenerator) @@ -10230,15 +10248,34 @@ def test_pickling(self): def test_constructor(self): self.assertIs(NoDefault, type(NoDefault)()) with self.assertRaises(TypeError): - NoDefault(1) + type(NoDefault)(1) def test_repr(self): self.assertEqual(repr(NoDefault), 'typing.NoDefault') + @requires_docstrings + def test_doc(self): + self.assertIsInstance(NoDefault.__doc__, str) + + def test_class(self): + self.assertIs(NoDefault.__class__, type(NoDefault)) + def test_no_call(self): with self.assertRaises(TypeError): NoDefault() + def test_no_attributes(self): + with self.assertRaises(AttributeError): + NoDefault.foo = 3 + with self.assertRaises(AttributeError): + NoDefault.foo + + # TypeError is consistent with the behavior of NoneType + with self.assertRaises(TypeError): + type(NoDefault).foo = 3 + with self.assertRaises(AttributeError): + type(NoDefault).foo + class AllTests(BaseTestCase): """Tests for __all__.""" diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py index 236b6e4516490a..4faad733245df9 100644 --- a/Lib/test/test_urlparse.py +++ b/Lib/test/test_urlparse.py @@ -103,7 +103,9 @@ class UrlParseTestCase(unittest.TestCase): - def checkRoundtrips(self, url, parsed, split): + def checkRoundtrips(self, url, parsed, split, url2=None): + if url2 is None: + url2 = url result = urllib.parse.urlparse(url) self.assertSequenceEqual(result, parsed) t = (result.scheme, result.netloc, result.path, @@ -111,7 +113,7 @@ def checkRoundtrips(self, url, parsed, split): self.assertSequenceEqual(t, parsed) # put it back together and it should be the same result2 = urllib.parse.urlunparse(result) - self.assertSequenceEqual(result2, url) + self.assertSequenceEqual(result2, url2) self.assertSequenceEqual(result2, result.geturl()) # the result of geturl() is a fixpoint; we can always parse it @@ -137,7 +139,7 @@ def checkRoundtrips(self, url, parsed, split): result.query, result.fragment) self.assertSequenceEqual(t, split) result2 = urllib.parse.urlunsplit(result) - self.assertSequenceEqual(result2, url) + self.assertSequenceEqual(result2, url2) self.assertSequenceEqual(result2, result.geturl()) # check the fixpoint property of re-parsing the result of geturl() @@ -175,9 +177,45 @@ def test_qs(self): def test_roundtrips(self): str_cases = [ + ('path/to/file', + ('', '', 'path/to/file', '', '', ''), + ('', '', 'path/to/file', '', '')), + ('/path/to/file', + ('', '', '/path/to/file', '', '', ''), + ('', '', '/path/to/file', '', '')), + ('//path/to/file', + ('', 'path', '/to/file', '', '', ''), + ('', 'path', '/to/file', '', '')), + ('////path/to/file', + ('', '', '//path/to/file', '', '', ''), 
+ ('', '', '//path/to/file', '', '')), + ('/////path/to/file', + ('', '', '///path/to/file', '', '', ''), + ('', '', '///path/to/file', '', '')), + ('scheme:path/to/file', + ('scheme', '', 'path/to/file', '', '', ''), + ('scheme', '', 'path/to/file', '', '')), + ('scheme:/path/to/file', + ('scheme', '', '/path/to/file', '', '', ''), + ('scheme', '', '/path/to/file', '', '')), + ('scheme://path/to/file', + ('scheme', 'path', '/to/file', '', '', ''), + ('scheme', 'path', '/to/file', '', '')), + ('scheme:////path/to/file', + ('scheme', '', '//path/to/file', '', '', ''), + ('scheme', '', '//path/to/file', '', '')), + ('scheme://///path/to/file', + ('scheme', '', '///path/to/file', '', '', ''), + ('scheme', '', '///path/to/file', '', '')), ('file:///tmp/junk.txt', ('file', '', '/tmp/junk.txt', '', '', ''), ('file', '', '/tmp/junk.txt', '', '')), + ('file:////tmp/junk.txt', + ('file', '', '//tmp/junk.txt', '', '', ''), + ('file', '', '//tmp/junk.txt', '', '')), + ('file://///tmp/junk.txt', + ('file', '', '///tmp/junk.txt', '', '', ''), + ('file', '', '///tmp/junk.txt', '', '')), ('imap://mail.python.org/mbox1', ('imap', 'mail.python.org', '/mbox1', '', '', ''), ('imap', 'mail.python.org', '/mbox1', '', '')), @@ -204,15 +242,58 @@ def test_roundtrips(self): 'action=download-manifest&url=https://example.com/app', ''), ('itms-services', '', '', 'action=download-manifest&url=https://example.com/app', '')), + ('+scheme:path/to/file', + ('', '', '+scheme:path/to/file', '', '', ''), + ('', '', '+scheme:path/to/file', '', '')), + ('sch_me:path/to/file', + ('', '', 'sch_me:path/to/file', '', '', ''), + ('', '', 'sch_me:path/to/file', '', '')), ] def _encode(t): return (t[0].encode('ascii'), tuple(x.encode('ascii') for x in t[1]), tuple(x.encode('ascii') for x in t[2])) bytes_cases = [_encode(x) for x in str_cases] + str_cases += [ + ('schème:path/to/file', + ('', '', 'schème:path/to/file', '', '', ''), + ('', '', 'schème:path/to/file', '', '')), + ] for url, parsed, split in str_cases + bytes_cases: self.checkRoundtrips(url, parsed, split) + def test_roundtrips_normalization(self): + str_cases = [ + ('///path/to/file', + '/path/to/file', + ('', '', '/path/to/file', '', '', ''), + ('', '', '/path/to/file', '', '')), + ('scheme:///path/to/file', + 'scheme:/path/to/file', + ('scheme', '', '/path/to/file', '', '', ''), + ('scheme', '', '/path/to/file', '', '')), + ('file:/tmp/junk.txt', + 'file:///tmp/junk.txt', + ('file', '', '/tmp/junk.txt', '', '', ''), + ('file', '', '/tmp/junk.txt', '', '')), + ('http:/tmp/junk.txt', + 'http:///tmp/junk.txt', + ('http', '', '/tmp/junk.txt', '', '', ''), + ('http', '', '/tmp/junk.txt', '', '')), + ('https:/tmp/junk.txt', + 'https:///tmp/junk.txt', + ('https', '', '/tmp/junk.txt', '', '', ''), + ('https', '', '/tmp/junk.txt', '', '')), + ] + def _encode(t): + return (t[0].encode('ascii'), + t[1].encode('ascii'), + tuple(x.encode('ascii') for x in t[2]), + tuple(x.encode('ascii') for x in t[3])) + bytes_cases = [_encode(x) for x in str_cases] + for url, url2, parsed, split in str_cases + bytes_cases: + self.checkRoundtrips(url, parsed, split, url2) + def test_http_roundtrips(self): # urllib.parse.urlsplit treats 'http:' as an optimized special case, # so we test both 'http:' and 'https:' in all the following. 
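The new round-trip cases above pin down how urlsplit and urlunsplit handle empty-netloc URLs with leading slashes, in combination with the urlunsplit change to Lib/urllib/parse.py further down in this diff. A small sketch using values taken directly from those cases; the round-trip of the path-only "//" prefix assumes the patched urlunsplit, and earlier releases would not restore it:

    from urllib.parse import urlsplit, urlunsplit

    # "//" introduces an (empty) authority, so only the remaining slashes stay in the path.
    assert tuple(urlsplit("////path/to/file")) == ("", "", "//path/to/file", "", "")
    # The patched urlunsplit re-emits the "//" marker, so the original URL comes back.
    assert urlunsplit(urlsplit("////path/to/file")) == "////path/to/file"

    # Some inputs normalize rather than round-trip exactly (see test_roundtrips_normalization):
    assert urlunsplit(urlsplit("///path/to/file")) == "/path/to/file"
    assert urlunsplit(urlsplit("file:/tmp/junk.txt")) == "file:///tmp/junk.txt"

Keeping that marker is what lets checkRoundtrips treat geturl() as a fixpoint for these path-only URLs, instead of emitting "//path/to/file", which would re-parse with "path" as the host.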
diff --git a/Lib/test/test_venv.py b/Lib/test/test_venv.py index 668642f73e8d9f..1769ed61b94075 100644 --- a/Lib/test/test_venv.py +++ b/Lib/test/test_venv.py @@ -24,7 +24,7 @@ requires_venv_with_pip, TEST_HOME_DIR, requires_resource, copy_python_src_ignore) from test.support.os_helper import (can_symlink, EnvironmentVarGuard, rmtree, - TESTFN) + TESTFN, FakePath) import unittest import venv from unittest.mock import patch, Mock @@ -125,12 +125,12 @@ def test_defaults_with_str_path(self): self.run_with_capture(venv.create, self.env_dir) self._check_output_of_default_create() - def test_defaults_with_pathlib_path(self): + def test_defaults_with_pathlike(self): """ - Test the create function with default arguments and a pathlib.Path path. + Test the create function with default arguments and a path-like path. """ rmtree(self.env_dir) - self.run_with_capture(venv.create, pathlib.Path(self.env_dir)) + self.run_with_capture(venv.create, FakePath(self.env_dir)) self._check_output_of_default_create() def _check_output_of_default_create(self): @@ -572,7 +572,7 @@ def test_pathsep_error(self): rmtree(self.env_dir) bad_itempath = self.env_dir + os.pathsep self.assertRaises(ValueError, venv.create, bad_itempath) - self.assertRaises(ValueError, venv.create, pathlib.Path(bad_itempath)) + self.assertRaises(ValueError, venv.create, FakePath(bad_itempath)) @unittest.skipIf(os.name == 'nt', 'not relevant on Windows') @requireVenvCreate diff --git a/Lib/test/test_weakref.py b/Lib/test/test_weakref.py index 16da24d7805b56..ef2fe92cc219b6 100644 --- a/Lib/test/test_weakref.py +++ b/Lib/test/test_weakref.py @@ -82,7 +82,7 @@ def callback(self, ref): @contextlib.contextmanager -def collect_in_thread(period=0.0001): +def collect_in_thread(period=0.005): """ Ensure GC collections happen in a different thread, at a high frequency. 
""" diff --git a/Lib/test/test_winapi.py b/Lib/test/test_winapi.py index 2ac6f3621710cd..73b8228ddc97ba 100644 --- a/Lib/test/test_winapi.py +++ b/Lib/test/test_winapi.py @@ -7,7 +7,7 @@ import threading import time import unittest -from test.support import import_helper +from test.support import import_helper, os_helper _winapi = import_helper.import_module('_winapi', required_on=['win']) @@ -127,3 +127,35 @@ def test_getshortpathname(self): # Should contain "PROGRA~" but we can't predict the number self.assertIsNotNone(re.match(r".\:\\PROGRA~\d", actual.upper()), actual) + + def test_namedpipe(self): + pipe_name = rf"\\.\pipe\LOCAL\{os_helper.TESTFN}" + + # Pipe does not exist, so this raises + with self.assertRaises(FileNotFoundError): + _winapi.WaitNamedPipe(pipe_name, 0) + + pipe = _winapi.CreateNamedPipe( + pipe_name, + _winapi.PIPE_ACCESS_DUPLEX, + 8, # 8=PIPE_REJECT_REMOTE_CLIENTS + 2, # two instances available + 32, 32, 0, 0) + self.addCleanup(_winapi.CloseHandle, pipe) + + # Pipe instance is available, so this passes + _winapi.WaitNamedPipe(pipe_name, 0) + + with open(pipe_name, 'w+b') as pipe2: + # No instances available, so this times out + # (WinError 121 does not get mapped to TimeoutError) + with self.assertRaises(OSError): + _winapi.WaitNamedPipe(pipe_name, 0) + + _winapi.WriteFile(pipe, b'testdata') + self.assertEqual(b'testdata', pipe2.read(8)) + + self.assertEqual((b'', 0), _winapi.PeekNamedPipe(pipe, 8)[:2]) + pipe2.write(b'testdata') + pipe2.flush() + self.assertEqual((b'testdata', 8), _winapi.PeekNamedPipe(pipe, 8)[:2]) diff --git a/Lib/test/test_winsound.py b/Lib/test/test_winsound.py index a59d0d24f5db48..870ab7bd41d8ce 100644 --- a/Lib/test/test_winsound.py +++ b/Lib/test/test_winsound.py @@ -1,12 +1,13 @@ # Ridiculously simple test of the winsound module for Windows. import functools -import pathlib +import os import time import unittest from test import support from test.support import import_helper +from test.support import os_helper support.requires('audio') @@ -85,13 +86,6 @@ def test_keyword_args(self): safe_MessageBeep(type=winsound.MB_OK) -# A class for testing winsound when the given path resolves -# to bytes rather than str. -class BytesPath(pathlib.WindowsPath): - def __fspath__(self): - return bytes(super().__fspath__(), 'UTF-8') - - class PlaySoundTest(unittest.TestCase): def test_errors(self): @@ -126,7 +120,7 @@ def test_snd_filename(self): def test_snd_filepath(self): fn = support.findfile('pluck-pcm8.wav', subdir='audiodata') - path = pathlib.Path(fn) + path = os_helper.FakePath(fn) safe_PlaySound(path, winsound.SND_FILENAME | winsound.SND_NODEFAULT) def test_snd_filepath_as_bytes(self): @@ -134,7 +128,7 @@ def test_snd_filepath_as_bytes(self): self.assertRaises( TypeError, winsound.PlaySound, - BytesPath(fn), + os_helper.FakePath(os.fsencode(fn)), winsound.SND_FILENAME | winsound.SND_NODEFAULT ) diff --git a/Lib/test/test_zipapp.py b/Lib/test/test_zipapp.py index f1c6b2d97621ee..00a5ed6626ddc5 100644 --- a/Lib/test/test_zipapp.py +++ b/Lib/test/test_zipapp.py @@ -265,14 +265,15 @@ def test_write_shebang_to_fileobj(self): zipapp.create_archive(str(target), new_target, interpreter='python2.7') self.assertTrue(new_target.getvalue().startswith(b'#!python2.7\n')) - def test_read_from_pathobj(self): - # Test that we can copy an archive using a pathlib.Path object + def test_read_from_pathlike_obj(self): + # Test that we can copy an archive using a path-like object # for the source. 
source = self.tmpdir / 'source' source.mkdir() (source / '__main__.py').touch() - target1 = self.tmpdir / 'target1.pyz' - target2 = self.tmpdir / 'target2.pyz' + source = os_helper.FakePath(str(source)) + target1 = os_helper.FakePath(str(self.tmpdir / 'target1.pyz')) + target2 = os_helper.FakePath(str(self.tmpdir / 'target2.pyz')) zipapp.create_archive(source, target1, interpreter='python') zipapp.create_archive(target1, target2, interpreter='python2.7') self.assertEqual(zipapp.get_interpreter(target2), 'python2.7') diff --git a/Lib/test/test_zipfile/_path/test_complexity.py b/Lib/test/test_zipfile/_path/test_complexity.py index fd7ce57551b7a5..b505dd7c376462 100644 --- a/Lib/test/test_zipfile/_path/test_complexity.py +++ b/Lib/test/test_zipfile/_path/test_complexity.py @@ -20,7 +20,7 @@ class TestComplexity(unittest.TestCase): @pytest.mark.flaky def test_implied_dirs_performance(self): best, others = big_o.big_o( - compose(consume, zipfile.CompleteDirs._implied_dirs), + compose(consume, zipfile._path.CompleteDirs._implied_dirs), lambda size: [ '/'.join(string.ascii_lowercase + str(n)) for n in range(size) ], diff --git a/Lib/test/test_zipfile/_path/test_path.py b/Lib/test/test_zipfile/_path/test_path.py index df5b8c9d8fea40..99842ffd63a64e 100644 --- a/Lib/test/test_zipfile/_path/test_path.py +++ b/Lib/test/test_zipfile/_path/test_path.py @@ -3,30 +3,36 @@ import contextlib import pathlib import pickle +import stat import sys import unittest import zipfile import zipfile._path +from test.support.os_helper import temp_dir, FakePath + from ._functools import compose from ._itertools import Counter from ._test_params import parameterize, Invoked -from test.support.os_helper import temp_dir - class jaraco: class itertools: Counter = Counter +def _make_link(info: zipfile.ZipInfo): # type: ignore[name-defined] + info.external_attr |= stat.S_IFLNK << 16 + + def build_alpharep_fixture(): """ Create a zip file with this structure: . 
├── a.txt + ├── n.txt (-> a.txt) ├── b │ ├── c.txt │ ├── d @@ -47,6 +53,7 @@ def build_alpharep_fixture(): - multiple files in a directory (b/c, b/f) - a directory containing only a directory (g/h) - a directory with files of different extensions (j/klm) + - a symlink (n) pointing to (a) "alpha" because it uses alphabet "rep" because it's a representative example @@ -61,6 +68,9 @@ def build_alpharep_fixture(): zf.writestr("j/k.bin", b"content of k") zf.writestr("j/l.baz", b"content of l") zf.writestr("j/m.bar", b"content of m") + zf.writestr("n.txt", b"a.txt") + _make_link(zf.infolist()[-1]) + zf.filename = "alpharep.zip" return zf @@ -91,7 +101,7 @@ def zipfile_ondisk(self, alpharep): def test_iterdir_and_types(self, alpharep): root = zipfile.Path(alpharep) assert root.is_dir() - a, b, g, j = root.iterdir() + a, k, b, g, j = root.iterdir() assert a.is_file() assert b.is_dir() assert g.is_dir() @@ -111,7 +121,7 @@ def test_is_file_missing(self, alpharep): @pass_alpharep def test_iterdir_on_file(self, alpharep): root = zipfile.Path(alpharep) - a, b, g, j = root.iterdir() + a, k, b, g, j = root.iterdir() with self.assertRaises(ValueError): a.iterdir() @@ -126,7 +136,7 @@ def test_subdir_is_dir(self, alpharep): @pass_alpharep def test_open(self, alpharep): root = zipfile.Path(alpharep) - a, b, g, j = root.iterdir() + a, k, b, g, j = root.iterdir() with a.open(encoding="utf-8") as strm: data = strm.read() self.assertEqual(data, "content of a") @@ -230,7 +240,7 @@ def test_open_missing_directory(self, alpharep): @pass_alpharep def test_read(self, alpharep): root = zipfile.Path(alpharep) - a, b, g, j = root.iterdir() + a, k, b, g, j = root.iterdir() assert a.read_text(encoding="utf-8") == "content of a" # Also check positional encoding arg (gh-101144). assert a.read_text("utf-8") == "content of a" @@ -264,13 +274,13 @@ def test_pathlike_construction(self, alpharep): zipfile.Path should be constructable from a path-like object """ zipfile_ondisk = self.zipfile_ondisk(alpharep) - pathlike = pathlib.Path(str(zipfile_ondisk)) + pathlike = FakePath(str(zipfile_ondisk)) zipfile.Path(pathlike) @pass_alpharep def test_traverse_pathlike(self, alpharep): root = zipfile.Path(alpharep) - root / pathlib.Path("a") + root / FakePath("a") @pass_alpharep def test_parent(self, alpharep): @@ -296,7 +306,7 @@ def test_mutability(self, alpharep): reflect that change. """ root = zipfile.Path(alpharep) - a, b, g, j = root.iterdir() + a, k, b, g, j = root.iterdir() alpharep.writestr('foo.txt', 'foo') alpharep.writestr('bar/baz.txt', 'baz') assert any(child.name == 'foo.txt' for child in root.iterdir()) @@ -513,12 +523,9 @@ def test_eq_hash(self, alpharep): @pass_alpharep def test_is_symlink(self, alpharep): - """ - See python/cpython#82102 for symlink support beyond this object. 
- """ - root = zipfile.Path(alpharep) - assert not root.is_symlink() + assert not root.joinpath('a.txt').is_symlink() + assert root.joinpath('n.txt').is_symlink() @pass_alpharep def test_relative_to(self, alpharep): @@ -539,12 +546,12 @@ def test_inheritance(self, alpharep): ['alpharep', 'path_type', 'subpath'], itertools.product( alpharep_generators, - [str, pathlib.Path], + [str, FakePath], ['', 'b/'], ), ) def test_pickle(self, alpharep, path_type, subpath): - zipfile_ondisk = path_type(self.zipfile_ondisk(alpharep)) + zipfile_ondisk = path_type(str(self.zipfile_ondisk(alpharep))) saved_1 = pickle.dumps(zipfile.Path(zipfile_ondisk, at=subpath)) restored_1 = pickle.loads(saved_1) diff --git a/Lib/test/test_zoneinfo/test_zoneinfo.py b/Lib/test/test_zoneinfo/test_zoneinfo.py index 8414721555731e..8bcd6d2e9951b9 100644 --- a/Lib/test/test_zoneinfo/test_zoneinfo.py +++ b/Lib/test/test_zoneinfo/test_zoneinfo.py @@ -17,7 +17,7 @@ from datetime import date, datetime, time, timedelta, timezone from functools import cached_property -from test.support import MISSING_C_DOCSTRINGS +from test.support import MISSING_C_DOCSTRINGS, requires_gil_enabled from test.test_zoneinfo import _support as test_support from test.test_zoneinfo._support import OS_ENV_LOCK, TZPATH_TEST_LOCK, ZoneInfoTestBase from test.support.import_helper import import_module, CleanImport @@ -1931,6 +1931,7 @@ def test_cache_location(self): self.assertFalse(hasattr(c_zoneinfo.ZoneInfo, "_weak_cache")) self.assertTrue(hasattr(py_zoneinfo.ZoneInfo, "_weak_cache")) + @requires_gil_enabled("gh-117783: types may be immortalized") def test_gc_tracked(self): import gc diff --git a/Lib/tkinter/__init__.py b/Lib/tkinter/__init__.py index daecf4eb2ea522..5352276e874bf5 100644 --- a/Lib/tkinter/__init__.py +++ b/Lib/tkinter/__init__.py @@ -40,7 +40,7 @@ from tkinter.constants import * import re -wantobjects = 2 +wantobjects = 1 _debug = False # set to True to print executed Tcl/Tk commands TkVersion = float(_tkinter.TK_VERSION) @@ -1727,6 +1727,9 @@ def getint_event(s): except (ValueError, TclError): return s + if any(isinstance(s, tuple) for s in args): + args = [s[0] if isinstance(s, tuple) and len(s) == 1 else s + for s in args] nsign, b, f, h, k, s, t, w, x, y, A, E, K, N, W, T, X, Y, D = args # Missing: (a, c, d, m, o, v, B, R) e = Event() diff --git a/Lib/trace.py b/Lib/trace.py index 7886959fa64f68..64fc8037e355ee 100755 --- a/Lib/trace.py +++ b/Lib/trace.py @@ -565,8 +565,12 @@ def localtrace_trace_and_count(self, frame, why, arg): if self.start_time: print('%.2f' % (_time() - self.start_time), end=' ') bname = os.path.basename(filename) - print("%s(%d): %s" % (bname, lineno, - linecache.getline(filename, lineno)), end='') + line = linecache.getline(filename, lineno) + print("%s(%d)" % (bname, lineno), end='') + if line: + print(": ", line, end='') + else: + print() return self.localtrace def localtrace_trace(self, frame, why, arg): @@ -578,8 +582,12 @@ def localtrace_trace(self, frame, why, arg): if self.start_time: print('%.2f' % (_time() - self.start_time), end=' ') bname = os.path.basename(filename) - print("%s(%d): %s" % (bname, lineno, - linecache.getline(filename, lineno)), end='') + line = linecache.getline(filename, lineno) + print("%s(%d)" % (bname, lineno), end='') + if line: + print(": ", line, end='') + else: + print() return self.localtrace def localtrace_count(self, frame, why, arg): diff --git a/Lib/traceback.py b/Lib/traceback.py index 9401b461497cc1..6ee1a50ca6804a 100644 --- a/Lib/traceback.py +++ b/Lib/traceback.py 
@@ -155,7 +155,7 @@ def format_exception(exc, /, value=_sentinel, tb=_sentinel, limit=None, \ return list(te.format(chain=chain, colorize=colorize)) -def format_exception_only(exc, /, value=_sentinel, *, show_group=False): +def format_exception_only(exc, /, value=_sentinel, *, show_group=False, **kwargs): """Format the exception part of a traceback. The return value is a list of strings, each ending in a newline. @@ -170,10 +170,11 @@ def format_exception_only(exc, /, value=_sentinel, *, show_group=False): :exc:`BaseExceptionGroup`, the nested exceptions are included as well, recursively, with indentation relative to their nesting depth. """ + colorize = kwargs.get("colorize", False) if value is _sentinel: value = exc te = TracebackException(type(value), value, None, compact=True) - return list(te.format_exception_only(show_group=show_group)) + return list(te.format_exception_only(show_group=show_group, colorize=colorize)) # -- not official API but folk probably use these two functions. @@ -579,7 +580,7 @@ def format_frame_summary(self, frame_summary, **kwargs): show_carets = False with suppress(Exception): anchors = _extract_caret_anchors_from_line_segment(segment) - show_carets = self.should_show_carets(start_offset, end_offset, all_lines, anchors) + show_carets = self._should_show_carets(start_offset, end_offset, all_lines, anchors) result = [] @@ -693,7 +694,7 @@ def output_line(lineno): return ''.join(row) - def should_show_carets(self, start_offset, end_offset, all_lines, anchors): + def _should_show_carets(self, start_offset, end_offset, all_lines, anchors): with suppress(SyntaxError, ImportError): import ast tree = ast.parse('\n'.join(all_lines)) diff --git a/Lib/turtledemo/__main__.py b/Lib/turtledemo/__main__.py index 2ab6c15e2c079e..731f98b02b17a7 100755 --- a/Lib/turtledemo/__main__.py +++ b/Lib/turtledemo/__main__.py @@ -92,13 +92,15 @@ from idlelib.colorizer import ColorDelegator, color_config from idlelib.percolator import Percolator from idlelib.textview import view_text +import turtle from turtledemo import __doc__ as about_turtledemo -import turtle +if sys.platform == 'win32': + from idlelib.util import fix_win_hidpi + fix_win_hidpi() demo_dir = os.path.dirname(os.path.abspath(__file__)) darwin = sys.platform == 'darwin' - STARTUP = 1 READY = 2 RUNNING = 3 diff --git a/Lib/typing.py b/Lib/typing.py index 8e61f50477bcc2..d09b3208f9d691 100644 --- a/Lib/typing.py +++ b/Lib/typing.py @@ -257,15 +257,15 @@ def _type_repr(obj): return repr(obj) -def _collect_parameters(args): - """Collect all type variables and parameter specifications in args +def _collect_type_parameters(args, *, enforce_default_ordering: bool = True): + """Collect all type parameters in args in order of first appearance (lexicographic order). For example:: >>> P = ParamSpec('P') >>> T = TypeVar('T') - >>> _collect_parameters((T, Callable[P, T])) + >>> _collect_type_parameters((T, Callable[P, T])) (~T, ~P) """ # required type parameter cannot appear after parameter with default @@ -281,20 +281,21 @@ def _collect_parameters(args): # `t` might be a tuple, when `ParamSpec` is substituted with # `[T, int]`, or `[int, *Ts]`, etc. 
for x in t: - for collected in _collect_parameters([x]): + for collected in _collect_type_parameters([x]): if collected not in parameters: parameters.append(collected) elif hasattr(t, '__typing_subst__'): if t not in parameters: - if type_var_tuple_encountered and t.has_default(): - raise TypeError('Type parameter with a default' - ' follows TypeVarTuple') + if enforce_default_ordering: + if type_var_tuple_encountered and t.has_default(): + raise TypeError('Type parameter with a default' + ' follows TypeVarTuple') - if t.has_default(): - default_encountered = True - elif default_encountered: - raise TypeError(f'Type parameter {t!r} without a default' - ' follows type parameter with a default') + if t.has_default(): + default_encountered = True + elif default_encountered: + raise TypeError(f'Type parameter {t!r} without a default' + ' follows type parameter with a default') parameters.append(t) else: @@ -320,7 +321,7 @@ def _check_generic_specialization(cls, arguments): if actual_len < expected_len: # If the parameter at index `actual_len` in the parameters list # has a default, then all parameters after it must also have - # one, because we validated as much in _collect_parameters(). + # one, because we validated as much in _collect_type_parameters(). # That means that no error needs to be raised here, despite # the number of arguments being passed not matching the number # of parameters: all parameters that aren't explicitly @@ -1255,7 +1256,7 @@ def _generic_init_subclass(cls, *args, **kwargs): if error: raise TypeError("Cannot inherit from plain Generic") if '__orig_bases__' in cls.__dict__: - tvars = _collect_parameters(cls.__orig_bases__) + tvars = _collect_type_parameters(cls.__orig_bases__) # Look for Generic[T1, ..., Tn]. # If found, tvars must be a subset of it. # If not found, tvars is it. @@ -1416,7 +1417,11 @@ def __init__(self, origin, args, *, inst=True, name=None): args = (args,) self.__args__ = tuple(... if a is _TypingEllipsis else a for a in args) - self.__parameters__ = _collect_parameters(args) + enforce_default_ordering = origin in (Generic, Protocol) + self.__parameters__ = _collect_type_parameters( + args, + enforce_default_ordering=enforce_default_ordering, + ) if not name: self.__module__ = origin.__module__ @@ -3784,6 +3789,16 @@ def __getattr__(attr): elif attr in {"ContextManager", "AsyncContextManager"}: import contextlib obj = _alias(getattr(contextlib, f"Abstract{attr}"), 2, name=attr, defaults=(bool | None,)) + elif attr == "_collect_parameters": + import warnings + + depr_message = ( + "The private _collect_parameters function is deprecated and will be" + " removed in a future version of Python. Any use of private functions" + " is discouraged and may break in the future." 
+ ) + warnings.warn(depr_message, category=DeprecationWarning, stacklevel=2) + obj = _collect_type_parameters else: raise AttributeError(f"module {__name__!r} has no attribute {attr!r}") globals()[attr] = obj diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py index fc9e7c99f283be..3932bb99c7e7d1 100644 --- a/Lib/urllib/parse.py +++ b/Lib/urllib/parse.py @@ -525,7 +525,7 @@ def urlunsplit(components): empty query; the RFC states that these are equivalent).""" scheme, netloc, url, query, fragment, _coerce_result = ( _coerce_args(*components)) - if netloc or (scheme and scheme in uses_netloc and url[:2] != '//'): + if netloc or (scheme and scheme in uses_netloc) or url[:2] == '//': if url and url[:1] != '/': url = '/' + url url = '//' + (netloc or '') + url if scheme: diff --git a/Lib/zipfile/_path/__init__.py b/Lib/zipfile/_path/__init__.py index 79ebb777354e03..f5ea18cee61930 100644 --- a/Lib/zipfile/_path/__init__.py +++ b/Lib/zipfile/_path/__init__.py @@ -5,6 +5,7 @@ import contextlib import pathlib import re +import stat import sys from .glob import Translator @@ -390,9 +391,11 @@ def match(self, path_pattern): def is_symlink(self): """ - Return whether this path is a symlink. Always false (python/cpython#82102). + Return whether this path is a symlink. """ - return False + info = self.root.getinfo(self.at) + mode = info.external_attr >> 16 + return stat.S_ISLNK(mode) def glob(self, pattern): if not pattern: diff --git a/Lib/zipimport.py b/Lib/zipimport.py index 4e41d109865e85..a49a21f0799df2 100644 --- a/Lib/zipimport.py +++ b/Lib/zipimport.py @@ -1,11 +1,9 @@ """zipimport provides support for importing Python modules from Zip archives. -This module exports three objects: +This module exports two objects: - zipimporter: a class; its constructor takes a path to a Zip archive. - ZipImportError: exception raised by zipimporter objects. It's a subclass of ImportError, so it can be caught as ImportError, too. -- _zip_directory_cache: a dict, mapping archive paths to zip directory - info dicts, as used in zipimporter._files. 
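The rewritten ``zipfile.Path.is_symlink`` above simply decodes the Unix mode
bits that ``zipfile`` keeps in the upper 16 bits of ``ZipInfo.external_attr``;
it previously hard-coded ``False`` (python/cpython#82102). A minimal sketch of
the new behaviour, using a made-up ``demo.zip`` with an explicit symlink entry
(the archive and file names here are illustrative)::

    import stat
    import zipfile

    # Build a throwaway archive: one regular file plus one symlink entry.
    # For a symlink, the stored data is the link target and the S_IFLNK
    # mode bits live in the high 16 bits of external_attr.
    with zipfile.ZipFile("demo.zip", "w") as zf:
        zf.writestr("data.txt", "hello\n")
        link = zipfile.ZipInfo("data.lnk")
        link.external_attr = (stat.S_IFLNK | 0o777) << 16
        zf.writestr(link, "data.txt")

    root = zipfile.Path("demo.zip")
    print((root / "data.txt").is_symlink())   # False
    print((root / "data.lnk").is_symlink())   # True with the change above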
It is usually not needed to use the zipimport module explicitly; it is used by the builtin import mechanism for sys.path items that are paths diff --git a/Makefile.pre.in b/Makefile.pre.in index 74a438b015a97d..a8a57d2655846d 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -40,6 +40,7 @@ LINKCC= @LINKCC@ AR= @AR@ READELF= @READELF@ SOABI= @SOABI@ +ABIFLAGS= @ABIFLAGS@ LDVERSION= @LDVERSION@ MODULE_LDFLAGS=@MODULE_LDFLAGS@ GITVERSION= @GITVERSION@ @@ -150,7 +151,6 @@ INCLUDEDIR= @includedir@ CONFINCLUDEDIR= $(exec_prefix)/include PLATLIBDIR= @PLATLIBDIR@ SCRIPTDIR= $(prefix)/$(PLATLIBDIR) -ABIFLAGS= @ABIFLAGS@ # executable name for shebangs EXENAME= $(BINDIR)/python$(LDVERSION)$(EXE) # Variable used by ensurepip @@ -631,7 +631,9 @@ LIBEXPAT_HEADERS= \ Modules/expat/utf8tab.h \ Modules/expat/xmlrole.h \ Modules/expat/xmltok.h \ - Modules/expat/xmltok_impl.h + Modules/expat/xmltok_impl.h \ + Modules/expat/xmltok_impl.c \ + Modules/expat/xmltok_ns.c ########################################################################## # hashlib's HACL* library @@ -2259,10 +2261,10 @@ bininstall: commoninstall altbininstall -if test "$(VERSION)" != "$(LDVERSION)"; then \ rm -f $(DESTDIR)$(BINDIR)/python$(VERSION)-config; \ (cd $(DESTDIR)$(BINDIR); $(LN) -s python$(LDVERSION)-config python$(VERSION)-config); \ - rm -f $(DESTDIR)$(LIBPC)/python-$(LDVERSION).pc; \ - (cd $(DESTDIR)$(LIBPC); $(LN) -s python-$(VERSION).pc python-$(LDVERSION).pc); \ - rm -f $(DESTDIR)$(LIBPC)/python-$(LDVERSION)-embed.pc; \ - (cd $(DESTDIR)$(LIBPC); $(LN) -s python-$(VERSION)-embed.pc python-$(LDVERSION)-embed.pc); \ + rm -f $(DESTDIR)$(LIBPC)/python-$(VERSION).pc; \ + (cd $(DESTDIR)$(LIBPC); $(LN) -s python-$(LDVERSION).pc python-$(VERSION).pc); \ + rm -f $(DESTDIR)$(LIBPC)/python-$(VERSION)-embed.pc; \ + (cd $(DESTDIR)$(LIBPC); $(LN) -s python-$(LDVERSION)-embed.pc python-$(VERSION)-embed.pc); \ fi -rm -f $(DESTDIR)$(BINDIR)/python3-config (cd $(DESTDIR)$(BINDIR); $(LN) -s python$(VERSION)-config python3-config) @@ -2434,6 +2436,7 @@ TESTSUBDIRS= idlelib/idle_test \ test/test_importlib/resources/data03/namespace/portion1 \ test/test_importlib/resources/data03/namespace/portion2 \ test/test_importlib/resources/namespacedata01 \ + test/test_importlib/resources/namespacedata01/subdirectory \ test/test_importlib/resources/zipdata01 \ test/test_importlib/resources/zipdata02 \ test/test_importlib/source \ @@ -2447,6 +2450,7 @@ TESTSUBDIRS= idlelib/idle_test \ test/test_pathlib \ test/test_peg_generator \ test/test_pydoc \ + test/test_pyrepl \ test/test_sqlite3 \ test/test_tkinter \ test/test_tomllib \ @@ -2698,8 +2702,8 @@ libainstall: all scripts $(INSTALL_DATA) Modules/Setup.bootstrap $(DESTDIR)$(LIBPL)/Setup.bootstrap $(INSTALL_DATA) Modules/Setup.stdlib $(DESTDIR)$(LIBPL)/Setup.stdlib $(INSTALL_DATA) Modules/Setup.local $(DESTDIR)$(LIBPL)/Setup.local - $(INSTALL_DATA) Misc/python.pc $(DESTDIR)$(LIBPC)/python-$(VERSION).pc - $(INSTALL_DATA) Misc/python-embed.pc $(DESTDIR)$(LIBPC)/python-$(VERSION)-embed.pc + $(INSTALL_DATA) Misc/python.pc $(DESTDIR)$(LIBPC)/python-$(LDVERSION).pc + $(INSTALL_DATA) Misc/python-embed.pc $(DESTDIR)$(LIBPC)/python-$(LDVERSION)-embed.pc $(INSTALL_SCRIPT) $(srcdir)/Modules/makesetup $(DESTDIR)$(LIBPL)/makesetup $(INSTALL_SCRIPT) $(srcdir)/install-sh $(DESTDIR)$(LIBPL)/install-sh $(INSTALL_SCRIPT) python-config.py $(DESTDIR)$(LIBPL)/python-config.py diff --git a/Misc/ACKS b/Misc/ACKS index eaa7453aaade3e..9c10a76f1df624 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -2054,6 +2054,7 @@ Doug Wyatt Xiang 
Zhang Robert Xiao Florent Xicluna +Yanbo, Xie Xinhang Xu Arnon Yaari Alakshendra Yadav diff --git a/Misc/HISTORY b/Misc/HISTORY index b66413277259dc..8ca35e1af62c05 100644 --- a/Misc/HISTORY +++ b/Misc/HISTORY @@ -607,7 +607,7 @@ Library MemoryError. - Issue #18473: Fixed 2to3 and 3to2 compatible pickle mappings. Fixed - ambigious reverse mappings. Added many new mappings. Import mapping is no + ambiguous reverse mappings. Added many new mappings. Import mapping is no longer applied to modules already mapped with full name mapping. - Issue #23745: The new email header parser now handles duplicate MIME @@ -2030,7 +2030,7 @@ Library initialization of the unquote_to_bytes() table of the urllib.parse module, to not waste memory if these modules are not used. -- Issue #19157: Include the broadcast address in the usuable hosts for IPv6 +- Issue #19157: Include the broadcast address in the usable hosts for IPv6 in ipaddress. - Issue #11599: When an external command (e.g. compiler) fails, distutils now @@ -2620,7 +2620,7 @@ Library - asyncio: Various improvements and small changes not all covered by issues listed below. E.g. wait_for() now cancels the inner task if - the timeout occcurs; tweaked the set of exported symbols; renamed + the timeout occurs; tweaked the set of exported symbols; renamed Empty/Full to QueueEmpty/QueueFull; "with (yield from lock)" now uses a separate context manager; readexactly() raises if not enough data was read; PTY support tweaks. @@ -3944,7 +3944,7 @@ Library - Issue #18996: TestCase.assertEqual() now more cleverly shorten differing strings in error report. -- Issue #19034: repr() for tkinter.Tcl_Obj now exposes string reperesentation. +- Issue #19034: repr() for tkinter.Tcl_Obj now exposes string representation. - Issue #18978: ``urllib.request.Request`` now allows the method to be indicated on the class and no longer sets it to None in ``__init__``. @@ -4191,7 +4191,7 @@ Library - Issue #18532: Change the builtin hash algorithms' names to lower case names as promised by hashlib's documentation. -- Issue #8713: add new spwan and forkserver start methods, and new functions +- Issue #8713: add new spawn and forkserver start methods, and new functions get_all_start_methods, get_start_method, and set_start_method, to multiprocessing. @@ -4524,7 +4524,7 @@ Core and Builtins - Issue #16613: Add *m* argument to ``collections.Chainmap.new_child`` to allow the new child map to be specified explicitly. -- Issue #16730: importlib.machinery.FileFinder now no longers raises an +- Issue #16730: importlib.machinery.FileFinder now no longer raises an exception when trying to populate its cache and it finds out the directory is unreadable or has turned into a file. Reported and diagnosed by David Pritchard. @@ -4832,7 +4832,7 @@ Library on Windows and adds no value over and above python -m pydoc ... - Issue #18155: The csv module now correctly handles csv files that use - a delimter character that has a special meaning in regexes, instead of + a delimiter character that has a special meaning in regexes, instead of throwing an exception. - Issue #14360: encode_quopri can now be successfully used as an encoder @@ -6329,7 +6329,7 @@ Documentation - Issue #15940: Specify effect of locale on time functions. -- Issue #17538: Document XML vulnerabilties +- Issue #17538: Document XML vulnerabilities - Issue #16642: sched.scheduler timefunc initial default is time.monotonic. 
Patch by Ramchandra Apte @@ -6676,7 +6676,7 @@ Library - Issue #14669: Fix pickling of connections and sockets on Mac OS X by sending/receiving an acknowledgment after file descriptor transfer. - TestPicklingConnection has been reenabled for Mac OS X. + TestPicklingConnection has been re-enabled for Mac OS X. - Issue #11062: Fix adding a message from file to Babyl mailbox. @@ -7114,7 +7114,7 @@ Build - Issue #14330: For cross builds, don't use host python, use host search paths for host compiler. -- Issue #15235: Allow Berkley DB versions up to 5.3 to build the dbm module. +- Issue #15235: Allow Berkeley DB versions up to 5.3 to build the dbm module. - Issue #15268: Search curses.h in /usr/include/ncursesw. @@ -7264,7 +7264,7 @@ Library called with no arguments. - Issue #14653: email.utils.mktime_tz() no longer relies on system - mktime() when timezone offest is supplied. + mktime() when timezone offset is supplied. - Issue #14684: zlib.compressobj() and zlib.decompressobj() now support the use of predefined compression dictionaries. Original patch by Sam Rushing. @@ -7606,7 +7606,7 @@ Library - Issue #14773: Fix os.fwalk() failing on dangling symlinks. - Issue #12541: Be lenient with quotes around Realm field of HTTP Basic - Authentation in urllib2. + Authentication in urllib2. - Issue #14807: move undocumented tarfile.filemode() to stat.filemode() and add doc entry. Add tarfile.filemode alias with deprecation warning. @@ -7673,7 +7673,7 @@ Library IDLE ---- -- Issue #14958: Change IDLE systax highlighting to recognize all string and +- Issue #14958: Change IDLE syntax highlighting to recognize all string and byte literals supported in Python 3.3. - Issue #10997: Prevent a duplicate entry in IDLE's "Recent Files" menu. @@ -10176,7 +10176,7 @@ IDLE - Issue #13296: Fix IDLE to clear compile __future__ flags on shell restart. (Patch by Roger Serwy) -- Issue #9871: Prevent IDLE 3 crash when given byte stings +- Issue #9871: Prevent IDLE 3 crash when given byte strings with invalid hex escape sequences, like b'\x0'. (Original patch by Claudiu Popa.) @@ -12098,7 +12098,7 @@ Library - Issue #9632: Remove sys.setfilesystemencoding() function: use PYTHONFSENCODING environment variable to set the filesystem encoding at Python startup. sys.setfilesystemencoding() creates inconsistencies because it is unable to - reencode all filenames in all objects. + re-encode all filenames in all objects. - Issue #9410: Various optimizations to the pickle module, leading to speedups up to 4x (depending on the benchmark). Mostly ported from Unladen Swallow; @@ -12509,7 +12509,7 @@ Library - Issue #9605: posix.getlogin() decodes the username with file filesystem encoding and surrogateescape error handler. Patch written by David Watson. -- Issue #9604: posix.initgroups() encodes the username using the fileystem +- Issue #9604: posix.initgroups() encodes the username using the filesystem encoding and surrogateescape error handler. Patch written by David Watson. - Issue #9603: posix.ttyname() and posix.ctermid() decode the terminal name @@ -12667,7 +12667,7 @@ What's New in Python 3.2 Alpha 1? Core and Builtins ----------------- -- Issue #8991: convertbuffer() rejects discontigious buffers. +- Issue #8991: convertbuffer() rejects discontiguous buffers. - Issue #7616: Fix copying of overlapping memoryview slices with the Intel compiler. @@ -13211,7 +13211,7 @@ Library - Issue #7989: Added pure python implementation of the `datetime` module. 
The C module is renamed to `_datetime` and if available, overrides all classes - defined in datetime with fast C impementation. Python implementation is based + defined in datetime with fast C implementation. Python implementation is based on the original python prototype for the datetime module by Tim Peters with minor modifications by the PyPy project. The test suite now tests `datetime` module with and without `_datetime` acceleration using the same test cases. @@ -15049,7 +15049,7 @@ Extension Modules an error. The _PY_STRUCT_FLOAT_COERCE constant has been removed. The version number has been bumped to 0.3. -- Issue #5359: Readd the Berkeley DB detection code to allow _dbm be built +- Issue #5359: Re-add the Berkeley DB detection code to allow _dbm be built using Berkeley DB. Tests @@ -17028,7 +17028,7 @@ Extension Modules and renamed to filter(), map(), and zip(). Also, renamed izip_longest() to zip_longest() and ifilterfalse() to filterfalse(). -- Issue #1762972: Readded the reload() function as imp.reload(). +- Issue #1762972: Re-added the reload() function as imp.reload(). - Bug #2111: mmap segfaults when trying to write a block opened with PROT_READ. @@ -18448,7 +18448,7 @@ Core and builtins - Fixed bug #1459029 - unicode reprs were double-escaped. -- Patch #1396919: The system scope threads are reenabled on FreeBSD +- Patch #1396919: The system scope threads are re-enabled on FreeBSD 5.4 and later versions. - Bug #1115379: Compiling a Unicode string with an encoding declaration @@ -21803,7 +21803,7 @@ Library - New csv package makes it easy to read/write CSV files. - Module shlex has been extended to allow posix-like shell parsings, - including a split() function for easy spliting of quoted strings and + including a split() function for easy splitting of quoted strings and commands. An iterator interface was also implemented. Tools/Demos @@ -27751,7 +27751,7 @@ Fri Mar 12 22:15:43 1999 Guido van Rossum The filename to URL conversion didn't properly quote special characters. - The URL to filename didn't properly unquote special chatacters. + The URL to filename didn't properly unquote special characters. * Objects/floatobject.c: OK, try again. Vladimir gave me a fix for the alignment bus error, @@ -27807,7 +27807,7 @@ Wed Mar 10 22:55:47 1999 Guido van Rossum classes in selected module methods of selected class - Sinlge clicking in a directory, module or class item updates the next + Single clicking in a directory, module or class item updates the next column with info about the selected item. Double clicking in a module, class or method item opens the file (and selects the clicked item if it is a class or method). @@ -28130,7 +28130,7 @@ webchecker and other ftp retrieves. - ConfigParser's get() method now accepts an optional keyword argument (vars) that is substituted on top of the defaults that were setup in -__init__. You can now also have recusive references in your +__init__. You can now also have recursive references in your configuration file. - Some improvements to the Queue module, including a put_nowait() @@ -28209,7 +28209,7 @@ core. not. - The curses module implements an optional nlines argument to -w.scroll(). (It then calls wscrl(win, nlines) instead of scoll(win).) +w.scroll(). (It then calls wscrl(win, nlines) instead of scroll(win).) Changes to tools ---------------- @@ -28504,7 +28504,7 @@ PyEval_GetGlobals. - glmodule.c: check in the changed version after running the stubber again -- this solves the conflict with curses over the 'clear' entry point much nicer. 
(Jack Jansen had checked in the changes to cstubs -eons ago, but I never regenrated glmodule.c :-( ) +eons ago, but I never regenerated glmodule.c :-( ) - frameobject.c: fix reference count bug in PyFrame_New. Vladimir Marangozov. @@ -28581,7 +28581,7 @@ idiom L1[len(L1):] = L2. - Better error messages when a sequence is indexed with a non-integer. -- Bettter error message when calling a non-callable object (include +- Better error message when calling a non-callable object (include the type in the message). Python services @@ -28656,7 +28656,7 @@ Internet Protocols and Support - imaplib.py: new version from Piers Lauder. - smtplib.py: change sendmail() method to accept a single string or a -list or strings as the destination (commom newbie mistake). +list or strings as the destination (common newbie mistake). - poplib.py: LIST with a msg argument fixed. @@ -31109,7 +31109,7 @@ encoding/decoding CGI form arguments. Catch all errors from the ftp module. HTTP requests now add the Host: header line. The proxy variable names are now mapped to lower case, for Windows. The spliturl() function no longer erroneously throws away all data past -the first newline. The basejoin() function now intereprets "../" +the first newline. The basejoin() function now interprets "../" correctly. I *believe* that the problems with "exception raised in __del__" under certain circumstances have been fixed (mostly by changes elsewher in the interpreter). @@ -31397,7 +31397,7 @@ changes and fixes. - Added a bunch of new winfo options to Tkinter.py; we should now be up to date with Tk 4.2. The new winfo options supported are: -mananger, pointerx, pointerxy, pointery, server, viewable, visualid, +manager, pointerx, pointerxy, pointery, server, viewable, visualid, visualsavailable. - The broken bind() method on Canvas objects defined in the Canvas.py @@ -32552,7 +32552,7 @@ The same applies to posixfile.open() and the socket method makefile(). is being maintained and distributed separately. - Improved support for the Apple Macintosh, in part by Jack Jansen, -e.g. interfaces to (a few) resource mananger functions, get/set file +e.g. interfaces to (a few) resource manager functions, get/set file type and creator, gestalt, sound manager, speech manager, MacTCP, comm toolbox, and the think C console library. This is being maintained and distributed separately. @@ -33229,7 +33229,7 @@ sys.argv[0]; it can simply do "if __name__ == '__main__': main()". * When an object is printed by the print statement, its implementation of str() is used. This means that classes can define __str__(self) to direct how their instances are printed. This is different from -__repr__(self), which should define an unambigous string +__repr__(self), which should define an unambiguous string representation of the instance. (If __str__() is not defined, it defaults to __repr__().) @@ -34366,7 +34366,7 @@ eval_code) and ceval.h (which doesn't need compile.hand declares the rest) ceval.h defines macros BGN_SAVE / END_SAVE for use with threads (to -improve the parallellism of multi-threaded programs by letting other +improve the parallelism of multi-threaded programs by letting other Python code run when a blocking system call or something similar is made) @@ -34514,7 +34514,7 @@ names listed in a 'global' statement must not be used in the function before the statement is reached. 
Remember that you don't need to use 'global' if you only want to *use* -a global variable in a function; nor do you need ot for assignments to +a global variable in a function; nor do you need to for assignments to parts of global variables (e.g., list or dictionary items or attributes of class instances). This has not changed; in fact assignment to part of a global variable was the standard workaround. diff --git a/Misc/NEWS.d/3.10.0a4.rst b/Misc/NEWS.d/3.10.0a4.rst index 398f7e5d3422cb..ae667f2bffe192 100644 --- a/Misc/NEWS.d/3.10.0a4.rst +++ b/Misc/NEWS.d/3.10.0a4.rst @@ -412,7 +412,7 @@ be created automatically. ``logging.disable`` will now validate the types and value of its parameter. It also now accepts strings representing the levels (as does -``loging.setLevel``) instead of only the numerical values. +``logging.setLevel``) instead of only the numerical values. .. diff --git a/Misc/NEWS.d/3.11.0a1.rst b/Misc/NEWS.d/3.11.0a1.rst index e6cf9c001a1a01..40fbb9d42b7944 100644 --- a/Misc/NEWS.d/3.11.0a1.rst +++ b/Misc/NEWS.d/3.11.0a1.rst @@ -972,7 +972,7 @@ manager` protocols correspondingly. .. section: Core and Builtins Make sure that the line number is set when entering a comprehension scope. -Ensures that backtraces inclusing generator expressions show the correct +This ensures that backtraces including generator expressions show the correct line number. .. @@ -2722,7 +2722,7 @@ Importing typing.io or typing.re now prints a ``DeprecationWarning``. .. section: Library argparse actions store_const and append_const each receive a default value -of None when the ``const`` kwarg is not provided. Previously, this raised a +of ``None`` when the ``const`` kwarg is not provided. Previously, this raised a :exc:`TypeError`. .. @@ -3995,7 +3995,7 @@ operator expressions. .. section: Documentation Document that :class:`collections.defaultdict` parameter ``default_factory`` -defaults to None and is positional-only. +defaults to ``None`` and is positional-only. .. diff --git a/Misc/NEWS.d/3.11.0a2.rst b/Misc/NEWS.d/3.11.0a2.rst index a6b5fe54b391c5..05644d0a4639b1 100644 --- a/Misc/NEWS.d/3.11.0a2.rst +++ b/Misc/NEWS.d/3.11.0a2.rst @@ -34,7 +34,7 @@ module but frozen modules are disabled. .. nonce: veL4lJ .. section: Core and Builtins -Specialize simple calls to Python functions (no starargs, keyowrd dict, or +Specialize simple calls to Python functions (no starargs, keyword dict, or closure) .. @@ -331,8 +331,8 @@ underlying SQLite API signals memory error. Patch by Erlend E. Aasland. .. nonce: 4MQt4r .. section: Library -pprint.pprint() now handles underscore_numbers correctly. Previously it was -always setting it to False. +:func:`pprint.pprint` now handles *underscore_numbers* correctly. +Previously it was always setting it to ``False``. .. diff --git a/Misc/NEWS.d/3.11.0a5.rst b/Misc/NEWS.d/3.11.0a5.rst index 30a462e9bfdcbf..954f5c18b48000 100644 --- a/Misc/NEWS.d/3.11.0a5.rst +++ b/Misc/NEWS.d/3.11.0a5.rst @@ -748,7 +748,7 @@ tests to use ``support.infinite_recursion()``. Patch by Victor Stinner. Skip test_builtin PTY tests on non-ASCII characters if the readline module is loaded. The readline module changes input() behavior, but test_builtin is -not intented to test the readline module. Patch by Victor Stinner. +not intended to test the readline module. Patch by Victor Stinner. .. 
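The *underscore_numbers* fix above is easiest to see in a quick sketch (the
sample dictionary is invented for illustration)::

    import pprint

    # Before the fix, pprint.pprint() dropped underscore_numbers on the floor;
    # with it, the keyword reaches PrettyPrinter and large integers are
    # rendered with digit-grouping underscores.
    pprint.pprint({"world_population": 8_045_311_447}, underscore_numbers=True)
    # {'world_population': 8_045_311_447}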
diff --git a/Misc/NEWS.d/3.11.0a6.rst b/Misc/NEWS.d/3.11.0a6.rst index 2fdceef7746d4e..66ffa4ffba52e5 100644 --- a/Misc/NEWS.d/3.11.0a6.rst +++ b/Misc/NEWS.d/3.11.0a6.rst @@ -1088,7 +1088,7 @@ Patch by Kumar Aditya. Fix wasm32-emscripten test failures and platform issues. - Disable syscalls that are not supported or don't work, e.g. wait, getrusage, prlimit, -mkfifo, mknod, setres[gu]id, setgroups. - Use fd_count to cound open fds. - +mkfifo, mknod, setres[gu]id, setgroups. - Use fd_count to count open fds. - Add more checks for subprocess and fork. - Add workarounds for missing _multiprocessing and failing socket.accept(). - Enable bzip2. - Disable large file support. - Disable signal.alarm. @@ -1162,7 +1162,7 @@ Terry Jan Reedy. .. section: C API Python's public headers no longer import ````, leaving code that -embedd/extends Python free to define ``bool``, ``true`` and ``false``. +embeds/extends Python free to define ``bool``, ``true`` and ``false``. .. @@ -1182,7 +1182,7 @@ internal C API ``pycore_frame.h`` header file. Patch by Victor Stinner. .. section: C API Rename ``Include/buffer.h`` header file to ``Include/pybuffer.h`` to avoid -conflits with projects having an existing ``buffer.h`` header file. Patch by +conflicts with projects having an existing ``buffer.h`` header file. Patch by Victor Stinner. .. @@ -1202,5 +1202,5 @@ API). Patch by Victor Stinner. .. nonce: __ZdpH .. section: C API -Added function :c:func:`PyType_GetModuleByDef`, which allows accesss to +Added function :c:func:`PyType_GetModuleByDef`, which allows access to module state when a method's defining class is not available. diff --git a/Misc/NEWS.d/3.11.0a7.rst b/Misc/NEWS.d/3.11.0a7.rst index f4e2ad8db678f5..a376c8becea9f4 100644 --- a/Misc/NEWS.d/3.11.0a7.rst +++ b/Misc/NEWS.d/3.11.0a7.rst @@ -1173,7 +1173,7 @@ implemented. .. section: Library Add an Barrier object in synchronization primitives of *asyncio* Lib in -order to be consistant with Barrier from *threading* and *multiprocessing* +order to be consistent with Barrier from *threading* and *multiprocessing* libs* .. @@ -1211,7 +1211,7 @@ Update PEP URLs to :pep:`676`'s new canonical form. .. nonce: 4Dn48U .. section: Documentation -Clarified the old Python versions compatiblity note of +Clarified the old Python versions compatibility note of :func:`binascii.crc32` / :func:`zlib.adler32` / :func:`zlib.crc32` functions. diff --git a/Misc/NEWS.d/3.11.0b1.rst b/Misc/NEWS.d/3.11.0b1.rst index f9296679655573..c35e8e2c1caf07 100644 --- a/Misc/NEWS.d/3.11.0b1.rst +++ b/Misc/NEWS.d/3.11.0b1.rst @@ -58,10 +58,10 @@ may have prevented Python-to-Python calls respecting PEP 523. .. nonce: -igcjS .. section: Core and Builtins -Add a closure keyword-only parameter to exec(). It can only be specified +Add a closure keyword-only parameter to :func:`exec()`. It can only be specified when exec-ing a code object that uses free variables. When specified, it must be a tuple, with exactly the number of cell variables referenced by the -code object. closure has a default value of None, and it must be None if the +code object. closure has a default value of ``None``, and it must be ``None`` if the code object doesn't refer to any free variables. .. @@ -664,8 +664,9 @@ for :func:`os.fcopyfile` available in macOs. .. nonce: l1p7CJ .. section: Library -For @dataclass, add weakref_slot. Default is False. If True, and if -slots=True, add a slot named "__weakref__", which will allow instances to be +For :func:`@dataclass `, add *weakref_slot*. +The new parameter defaults to ``False``. 
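A short sketch of the new *weakref_slot* option (the ``Point`` class is a
made-up example)::

    import weakref
    from dataclasses import dataclass

    # weakref_slot=True is only meaningful together with slots=True: it adds
    # a "__weakref__" slot so instances of a slotted dataclass can be weakly
    # referenced.
    @dataclass(slots=True, weakref_slot=True)
    class Point:
        x: int
        y: int

    p = Point(1, 2)
    r = weakref.ref(p)   # raises TypeError if weakref_slot is left at False
    print(r() is p)      # True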
If true, and if +``slots=True``, add a slot named ``"__weakref__"``, which will allow instances to be weakref'd. Contributed by Eric V. Smith .. diff --git a/Misc/NEWS.d/3.12.0a1.rst b/Misc/NEWS.d/3.12.0a1.rst index f75a83c1d950d4..fc654d60a4a0e4 100644 --- a/Misc/NEWS.d/3.12.0a1.rst +++ b/Misc/NEWS.d/3.12.0a1.rst @@ -560,7 +560,7 @@ versions prior to 3.11 .. nonce: 9lmTCC .. section: Core and Builtins -Remove two cases of undefined behavoir, by adding NULL checks. +Remove two cases of undefined behavior, by adding NULL checks. .. @@ -2591,7 +2591,7 @@ Update bundled pip to 22.2.2. Fix :class:`asyncio.TaskGroup` to propagate exception when :exc:`asyncio.CancelledError` was replaced with another exception by a -context manger. Patch by Kumar Aditya and Guido van Rossum. +context manager. Patch by Kumar Aditya and Guido van Rossum. .. @@ -3561,8 +3561,8 @@ with :func:`os.pidfd_open` in non-blocking mode. Patch by Kumar Aditya. .. nonce: mkYl5q .. section: Library -Implement Enum __contains__ that returns True or False to replace the -deprecated behaviour that would sometimes raise a TypeError. +Implement ``Enum.__contains__`` that returns ``True`` or ``False`` to replace the +deprecated behaviour that would sometimes raise a :exc:`TypeError`. .. @@ -3729,7 +3729,7 @@ In a very special case, the email package tried to append the nonexistent .. nonce: e6uKxj .. section: Library -Fix :func:`ast.unparse` when ``ImportFrom.level`` is None +Fix :func:`ast.unparse` when ``ImportFrom.level`` is ``None`` .. @@ -3791,7 +3791,7 @@ the :c:type:`time_t` type in C. .. section: Library Fixed crash resulting from calling bisect.insort() or bisect.insort_left() -with the key argument not equal to None. +with the key argument not equal to ``None``. .. @@ -4080,7 +4080,7 @@ replacement strings containing group references by 2--3 times. .. section: Library Fix findtext in the xml module to only give an empty string when the text -attribute is set to None. +attribute is set to ``None``. .. @@ -4476,7 +4476,7 @@ they are deprecated. Contributed by C.A.M. Gerlach. .. nonce: we7AFm .. section: Documentation -Replaced incorrectly written true/false values in documentiation. Patch by +Replaced incorrectly written true/false values in documentation. Patch by Robert O'Shea .. diff --git a/Misc/NEWS.d/3.12.0a2.rst b/Misc/NEWS.d/3.12.0a2.rst index f1d69d9b3e7638..88d84ad93b35b5 100644 --- a/Misc/NEWS.d/3.12.0a2.rst +++ b/Misc/NEWS.d/3.12.0a2.rst @@ -822,7 +822,7 @@ and to indicate when it became late-bound. .. nonce: 7KinCV .. section: Tests -The Python test suite now fails wit exit code 4 if no tests ran. It should +The Python test suite now fails with exit code 4 if no tests ran. It should help detecting typos in test names and test methods. .. diff --git a/Misc/NEWS.d/3.12.0a3.rst b/Misc/NEWS.d/3.12.0a3.rst index ce128fd5f80c77..07593998d80891 100644 --- a/Misc/NEWS.d/3.12.0a3.rst +++ b/Misc/NEWS.d/3.12.0a3.rst @@ -82,7 +82,7 @@ Victor Stinner. .. section: Core and Builtins Fixed a bug that was causing a buffer overflow if the tokenizer copies a -line missing the newline caracter from a file that is as long as the +line missing the newline character from a file that is as long as the available tokenizer buffer. Patch by Pablo galindo .. @@ -496,7 +496,7 @@ Created packages from zipfile and test_zipfile modules, separating Fix :attr:`~ipaddress.IPv4Address.is_private` properties in the :mod:`ipaddress` module. 
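A quick sketch of the corrected behaviour::

    import ipaddress

    # 0.0.0.0/0 is the whole IPv4 space, not a private network.
    print(ipaddress.ip_network("0.0.0.0/0").is_private)    # False with this fix
    print(ipaddress.ip_network("10.0.0.0/8").is_private)   # True (RFC 1918 range)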
Previously non-private networks (0.0.0.0/0) would -return True from this method; now they correctly return False. +return ``True`` from this method; now they correctly return ``False``. .. diff --git a/Misc/NEWS.d/3.12.0a4.rst b/Misc/NEWS.d/3.12.0a4.rst index 82faa5ad0b2031..d7af30f6c09b2b 100644 --- a/Misc/NEWS.d/3.12.0a4.rst +++ b/Misc/NEWS.d/3.12.0a4.rst @@ -65,8 +65,8 @@ redundant. .. nonce: M2n6Kg .. section: Core and Builtins -Fix :func:`int.__sizeof__` calculation to include the 1 element ob_digit -array for 0 and False. +Fix :func:`int.__sizeof__` calculation to include the 1-element ``ob_digit`` +array for ``0`` and ``False``. .. @@ -830,7 +830,7 @@ Reduced the memory usage of :func:`urllib.parse.unquote` and .. section: Library ``inspect.signature`` was raising ``TypeError`` on call with mock objects. -Now it correctly returns ``(*args, **kwargs)`` as infered signature. +Now it correctly returns ``(*args, **kwargs)`` as inferred signature. .. diff --git a/Misc/NEWS.d/3.12.0a5.rst b/Misc/NEWS.d/3.12.0a5.rst index 8cf90b0e9cde46..effda2be6fd26c 100644 --- a/Misc/NEWS.d/3.12.0a5.rst +++ b/Misc/NEWS.d/3.12.0a5.rst @@ -506,7 +506,7 @@ inheritance. .. nonce: 7sQz5l .. section: Build -Update BOLT configration not to use depreacted usage of ``--split +Update BOLT configuration not to use deprecated usage of ``--split functions``. Patch by Donghee Na. .. diff --git a/Misc/NEWS.d/3.12.0a6.rst b/Misc/NEWS.d/3.12.0a6.rst index 05f9243eb6b1bc..382dae33fcaee1 100644 --- a/Misc/NEWS.d/3.12.0a6.rst +++ b/Misc/NEWS.d/3.12.0a6.rst @@ -453,7 +453,7 @@ E. Aasland. .. section: Library Change repr of :class:`collections.OrderedDict` to use regular dictionary -formating instead of pairs of keys and values. +formatting instead of pairs of keys and values. .. diff --git a/Misc/NEWS.d/3.12.0b1.rst b/Misc/NEWS.d/3.12.0b1.rst index 764b80b7b7d436..9f3095b224233e 100644 --- a/Misc/NEWS.d/3.12.0b1.rst +++ b/Misc/NEWS.d/3.12.0b1.rst @@ -395,7 +395,7 @@ Fix bug in line numbers of instructions emitted for :keyword:`except* .. section: Core and Builtins Clarify :exc:`SyntaxWarning` with literal ``is`` comparison by specifying -which literal is problematic, since comparisons using ``is`` with e.g. None +which literal is problematic, since comparisons using ``is`` with e.g. ``None`` and bool literals are idiomatic. .. @@ -1446,7 +1446,7 @@ Adapt the :mod:`winsound` extension module to :pep:`687`. .. nonce: jurMzv .. section: Library -Remove deprecation of enum ``memmber.member`` access. +Remove deprecation of enum ``member.member`` access. .. diff --git a/Misc/NEWS.d/3.13.0a1.rst b/Misc/NEWS.d/3.13.0a1.rst index 4937f9da5ae629..9a321f779c24ff 100644 --- a/Misc/NEWS.d/3.13.0a1.rst +++ b/Misc/NEWS.d/3.13.0a1.rst @@ -563,7 +563,7 @@ on deallocation. Fix :meth:`multiprocessing.synchronize.SemLock.__setstate__` to properly initialize :attr:`multiprocessing.synchronize.SemLock._is_fork_ctx`. This -fixes a regression when passing a SemLock accross nested processes. +fixes a regression when passing a SemLock across nested processes. Rename :attr:`multiprocessing.synchronize.SemLock.is_fork_ctx` to :attr:`multiprocessing.synchronize.SemLock._is_fork_ctx` to avoid exposing @@ -708,7 +708,7 @@ Fixes crash when tracing in recursive calls to Python classes. .. section: Core and Builtins Remove the ``_PyCFrame`` struct, moving the pointer to the current -intepreter frame back to the threadstate, as it was for 3.10 and earlier. +interpreter frame back to the threadstate, as it was for 3.10 and earlier. 
The ``_PyCFrame`` existed as a performance optimization for tracing. Since PEP 669 has been implemented, this optimization no longer applies. @@ -926,7 +926,7 @@ Isolate :mod:`!_decimal` (apply :pep:`687`). Patch by Charlie Zhao. Add the exception as the third argument to ``PY_UNIND`` callbacks in ``sys.monitoring``. This makes the ``PY_UNWIND`` callback consistent with -the other exception hanlding callbacks. +the other exception handling callbacks. .. @@ -935,7 +935,7 @@ the other exception hanlding callbacks. .. nonce: DdEwV8 .. section: Core and Builtins -Raise a ``ValueError`` when a monitoring callback funtion returns +Raise a ``ValueError`` when a monitoring callback function returns ``DISABLE`` for events that cannot be disabled locally. .. @@ -1006,7 +1006,7 @@ Add :meth:`dbm.gnu.gdbm.clear` to :mod:`dbm.gnu`. Patch By Donghee Na. .. section: Core and Builtins The ASYNC and AWAIT tokens are removed from the Grammar, which removes the -posibility of making ``async`` and ``await`` soft keywords when using +possibility of making ``async`` and ``await`` soft keywords when using ``feature_version<7`` in :func:`ast.parse`. .. @@ -1028,7 +1028,7 @@ the call is not a classmethod. .. nonce: DdqHFg .. section: Core and Builtins -Python no longer crashes due an infrequent race when initialzing +Python no longer crashes due an infrequent race when initializing per-interpreter interned strings. The crash would manifest when the interpreter was finalized. @@ -1922,7 +1922,7 @@ objects .. nonce: RDGe8- .. section: Library -Deprecation warning about non-integer number in :mod:`gettext` now alwais +Deprecation warning about non-integer number in :mod:`gettext` now always refers to the line in the user code where gettext function or method is used. Previously it could refer to a line in ``gettext`` code. @@ -1964,7 +1964,7 @@ debugging. .. nonce: LCxiau .. section: Library -Fix :func:`termios.tcsetattr` bug that was overwritting existing errors +Fix :func:`termios.tcsetattr` bug that was overwriting existing errors during parsing integers from ``term`` list. .. @@ -2047,7 +2047,7 @@ point. .. nonce: fECxTj .. section: Library -On Windows, multiprocessing ``Popen.terminate()`` now catchs +On Windows, multiprocessing ``Popen.terminate()`` now catches :exc:`PermissionError` and get the process exit code. If the process is still running, raise again the :exc:`PermissionError`. Otherwise, the process terminated as expected: store its exit code. Patch by Victor @@ -2857,7 +2857,7 @@ Seems that in some conditions, OpenSSL will return ``SSL_ERROR_SYSCALL`` instead of ``SSL_ERROR_SSL`` when a certification verification has failed, but the error parameters will still contain ``ERR_LIB_SSL`` and ``SSL_R_CERTIFICATE_VERIFY_FAILED``. We are now detecting this situation and -raising the appropiate ``ssl.SSLCertVerificationError``. Patch by Pablo +raising the appropriate ``ssl.SSLCertVerificationError``. Patch by Pablo Galindo .. @@ -2979,7 +2979,7 @@ method. Patch by James Cave. .. section: Library Fix overflow on 32-bit systems with :mod:`asyncio` :func:`os.sendfile` -implemention. +implementation. .. @@ -3251,7 +3251,7 @@ Eliseev. .. nonce: NN35-U .. section: Library -Optimize ``(?!)`` (pattern which alwais fails) in regular expressions. +Optimize ``(?!)`` (pattern which always fails) in regular expressions. .. @@ -3436,7 +3436,8 @@ added support for this decorator. Patch by Alex Waygood. .. nonce: C1ahtk .. section: Library -Make pydoc.doc catch bad module ImportError when output stream is not None. 
+Make :func:`pydoc.doc` catch bad module :exc:`ImportError` +when output stream is not ``None``. .. @@ -3494,7 +3495,7 @@ star imports. .. nonce: TJEUkd .. section: Library -Zipapp will now skip over apending an archive to itself. +Zipapp will now skip over appending an archive to itself. .. @@ -4563,7 +4564,7 @@ Deprecate passing any arguments to :func:`threading.RLock`. .. nonce: o5Zb0t .. section: Library -Refactored ``zipfile._strip_extra`` to use higher level abstactions for +Refactored ``zipfile._strip_extra`` to use higher level abstractions for extras instead of a heavy-state loop. .. @@ -5013,7 +5014,7 @@ by Victor Stinner. Fix test_timeout() of test_concurrent_futures.test_wait. Remove the future which may or may not complete depending if it takes longer than the timeout -ot not. Keep the second future which does not complete before wait() +or not. Keep the second future which does not complete before wait() timeout. Patch by Victor Stinner. .. @@ -5103,7 +5104,7 @@ Victor Stinner. regrtest: Add ``--fast-ci`` and ``--slow-ci`` options. ``--fast-ci`` uses a default timeout of 10 minutes and ``-u all,-cpu`` (skip slowest tests). -``--slow-ci`` uses a default timeout of 20 minues and ``-u all`` (run all +``--slow-ci`` uses a default timeout of 20 minutes and ``-u all`` (run all tests). Patch by Victor Stinner. .. @@ -5231,7 +5232,7 @@ and ``sysctl net.inet.udp.blackhole=1``). Patch by Victor Stinner. Skip ``test_gdb`` if gdb is unable to retrieve Python frame objects: if a frame is ````. When Python is built with "clang -Og", gdb can -fail to retrive the *frame* parameter of ``_PyEval_EvalFrameDefault()``. In +fail to retrieve the *frame* parameter of ``_PyEval_EvalFrameDefault()``. In this case, tests like ``py_bt()`` are likely to fail. Without getting access to Python frames, ``python-gdb.py`` is mostly clueless on retrieving the Python traceback. Moreover, ``test_gdb`` is no longer skipped on macOS if diff --git a/Misc/NEWS.d/3.13.0a2.rst b/Misc/NEWS.d/3.13.0a2.rst index e5841e14c02efb..c6b2b1b263ffab 100644 --- a/Misc/NEWS.d/3.13.0a2.rst +++ b/Misc/NEWS.d/3.13.0a2.rst @@ -228,7 +228,7 @@ cross-interpreter API. Patch by Anthony Shaw. .. nonce: ageUWQ .. section: Core and Builtins -Add support for sharing of True and False between interpreters using the +Add support for sharing of ``True`` and ``False`` between interpreters using the cross-interpreter API. Patch by Anthony Shaw. .. @@ -974,7 +974,7 @@ pattern. .. nonce: 6ah-aw .. section: Library -Add the :attr:`ipaddress.IPv4Address.ipv6_mapped` property, which retuns the +Add the :attr:`ipaddress.IPv4Address.ipv6_mapped` property, which returns the IPv4-mapped IPv6 address. .. @@ -1354,8 +1354,8 @@ crash encountered after the first :meth:`tkinter.Tk` instance is destroyed. .. section: IDLE Add docstrings to the IDLE debugger module. Fix two bugs: initialize -Idb.botframe (should be in Bdb); in Idb.in_rpc_code, check whether -prev_frame is None before trying to use it. Greatly expand test_debugger. +``Idb.botframe`` (should be in Bdb); in ``Idb.in_rpc_code``, check whether +``prev_frame`` is ``None`` before trying to use it. Greatly expand test_debugger. .. diff --git a/Misc/NEWS.d/3.13.0a3.rst b/Misc/NEWS.d/3.13.0a3.rst index 218ba609bd80c0..2c660192dcd5b3 100644 --- a/Misc/NEWS.d/3.13.0a3.rst +++ b/Misc/NEWS.d/3.13.0a3.rst @@ -269,7 +269,7 @@ Correctly compute end column offsets for multiline tokens in the .. nonce: 4ADN7i .. 
section: Core and Builtins -Fix None.__ne__(None) returning NotImplemented instead of False +Fix ``None.__ne__(None)`` returning ``NotImplemented`` instead of ``False``. .. @@ -609,7 +609,7 @@ with the documentation) :func:`asyncio.Condition.wait()` now re-raises the same :exc:`CancelledError` instance that may have caused it to be interrupted. -Fixed race condition in :func:`asyncio.Semaphore.aquire` when interrupted +Fixed race condition in :func:`asyncio.Semaphore.acquire` when interrupted with a :exc:`CancelledError`. .. @@ -928,7 +928,7 @@ on Windows. .. section: Library Fix :func:`shutil.copymode` and :func:`shutil.copystat` on Windows. -Previously they worked differenly if *dst* is a symbolic link: they modified +Previously they worked differently if *dst* is a symbolic link: they modified the permission bits of *dst* itself rather than the file it points to if *follow_symlinks* is true or *src* is not a symbolic link, and did not modify the permission bits if *follow_symlinks* is false and *src* is a @@ -1550,7 +1550,7 @@ addresses are encountered instead of potentially inaccurate values. Add optional *strict* parameter to these two functions: use ``strict=False`` to get the old behavior, accept malformed inputs. ``getattr(email.utils, 'supports_strict_parsing', False)`` can be use to check if the *strict* -paramater is available. Patch by Thomas Dwyer and Victor Stinner to improve +parameter is available. Patch by Thomas Dwyer and Victor Stinner to improve the :cve:`2023-27043` fix. .. @@ -1615,7 +1615,7 @@ method of :class:`itertools.pairwise`. .. section: Library Small (10 - 20%) and trivial performance improvement of -:func:`urrlib.request.getproxies_environment`, typically useful when there +:func:`urllib.request.getproxies_environment`, typically useful when there are many environment variables to go over. .. diff --git a/Misc/NEWS.d/3.13.0a4.rst b/Misc/NEWS.d/3.13.0a4.rst index 39af0534cf8fb5..5efc244c6086cc 100644 --- a/Misc/NEWS.d/3.13.0a4.rst +++ b/Misc/NEWS.d/3.13.0a4.rst @@ -1181,7 +1181,7 @@ configure.ac. .. nonce: XcEXEZ .. section: Build -configure and Makefile were refactored to accomodate framework builds on +configure and Makefile were refactored to accommodate framework builds on Apple platforms other than macOS. .. diff --git a/Misc/NEWS.d/3.13.0a5.rst b/Misc/NEWS.d/3.13.0a5.rst index 55dee59827ad8f..6d74c6bc5c4d55 100644 --- a/Misc/NEWS.d/3.13.0a5.rst +++ b/Misc/NEWS.d/3.13.0a5.rst @@ -962,7 +962,7 @@ On Windows, commas passed in arguments to ``Tools\buildbot\test.bat`` and .. nonce: -dlzfI .. section: Tests -Fix translation of exception hander targets by +Fix translation of exception handler targets by ``_testinternalcapi.optimize_cfg``. .. diff --git a/Misc/NEWS.d/3.13.0a6.rst b/Misc/NEWS.d/3.13.0a6.rst index 06807b396ed5da..4d44bc664ef8b0 100644 --- a/Misc/NEWS.d/3.13.0a6.rst +++ b/Misc/NEWS.d/3.13.0a6.rst @@ -913,7 +913,7 @@ call. .. nonce: IMLi6K .. section: Documentation -Remove compatibilty references to Emscripten. +Remove compatibility references to Emscripten. .. diff --git a/Misc/NEWS.d/3.13.0b1.rst b/Misc/NEWS.d/3.13.0b1.rst index 8d49ff06efd07b..09b62c8377aabd 100644 --- a/Misc/NEWS.d/3.13.0b1.rst +++ b/Misc/NEWS.d/3.13.0b1.rst @@ -300,6 +300,7 @@ Improve :exc:`SyntaxError` message for empty type param brackets. .. nonce: qDEB66 .. section: Core and Builtins +Fix :func:`os.path.normpath` for UNC paths on Windows. Speed up :func:`os.path.splitroot` with a native implementation. .. @@ -1346,13 +1347,13 @@ urllib. .. nonce: du4UKW .. 
section: Library -Callbacks registered in the :mod:`tkinter` module now take arguments as -various Python objects (``int``, ``float``, ``bytes``, ``tuple``), not just -``str``. To restore the previous behavior set :mod:`!tkinter` module global -:data:`~tkinter.wantobject` to ``1`` before creating the -:class:`~tkinter.Tk` object or call the :meth:`~tkinter.Tk.wantobject` -method of the :class:`!Tk` object with argument ``1``. Calling it with -argument ``2`` restores the current default behavior. +Setting the :mod:`!tkinter` module global :data:`~tkinter.wantobject` to ``2`` +before creating the :class:`~tkinter.Tk` object or call the +:meth:`~tkinter.Tk.wantobject` method of the :class:`!Tk` object with argument +``2`` makes now arguments to callbacks registered in the :mod:`tkinter` module +to be passed as various Python objects (``int``, ``float``, ``bytes``, ``tuple``), +depending on their internal representation in Tcl, instead of always ``str``. +:data:`!tkinter.wantobject` is now set to ``2`` by default. .. diff --git a/Misc/NEWS.d/3.13.0b2.rst b/Misc/NEWS.d/3.13.0b2.rst new file mode 100644 index 00000000000000..1b31fd5e9b2c34 --- /dev/null +++ b/Misc/NEWS.d/3.13.0b2.rst @@ -0,0 +1,805 @@ +.. date: 2024-05-08-21-59-38 +.. gh-issue: 118773 +.. nonce: 7dFRJY +.. release date: 2024-06-05 +.. section: Security + +Fixes creation of ACLs in :func:`os.mkdir` on Windows to work correctly on +non-English machines. + +.. + +.. date: 2024-05-01-20-57-09 +.. gh-issue: 118486 +.. nonce: K44KJG +.. section: Security + +:func:`os.mkdir` on Windows now accepts *mode* of ``0o700`` to restrict the +new directory to the current user. This fixes :cve:`2024-4030` affecting +:func:`tempfile.mkdtemp` in scenarios where the base temporary directory is +more permissive than the default. + +.. + +.. date: 2024-06-03-13-25-04 +.. gh-issue: 119724 +.. nonce: EH1dkA +.. section: Core and Builtins + +Reverted improvements to error messages for ``elif``/``else`` statements not +matching any valid statements, which made in hard to locate the syntax +errors inside those ``elif``/``else`` blocks. + +.. + +.. date: 2024-05-31-12-06-11 +.. gh-issue: 119842 +.. nonce: tCGVsv +.. section: Core and Builtins + +Honor :c:func:`PyOS_InputHook` in the new REPL. Patch by Pablo Galindo + +.. + +.. date: 2024-05-30-23-01-00 +.. gh-issue: 119821 +.. nonce: jPGfvt +.. section: Core and Builtins + +Fix execution of :ref:`annotation scopes ` within classes +when ``globals`` is set to a non-dict. Patch by Jelle Zijlstra. + +.. + +.. date: 2024-05-25-16-45-27 +.. gh-issue: 119548 +.. nonce: pqF9Y6 +.. section: Core and Builtins + +Add a ``clear`` command to the REPL. Patch by Pablo Galindo + +.. + +.. date: 2024-05-25-13-51-48 +.. gh-issue: 111999 +.. nonce: L0q1gh +.. section: Core and Builtins + +Fix the signature of :meth:`str.format_map`. + +.. + +.. date: 2024-05-25-12-52-25 +.. gh-issue: 119560 +.. nonce: wSlm8q +.. section: Core and Builtins + +An invalid assert in beta 1 has been removed. The assert would fail if +``PyState_FindModule()`` was used in an extension module's init function +before the module def had been initialized. + +.. + +.. date: 2024-05-24-21-16-52 +.. gh-issue: 119369 +.. nonce: qBThho +.. section: Core and Builtins + +Fix deadlock during thread deletion in free-threaded build, which could +occur when the GIL was enabled at runtime. + +.. + +.. date: 2024-05-24-21-04-00 +.. gh-issue: 119525 +.. nonce: zLFLf1 +.. 
section: Core and Builtins + +Fix deadlock involving ``_PyType_Lookup()`` cache in the free-threaded build +when the GIL is dynamically enabled at runtime. + +.. + +.. date: 2024-05-23-06-34-45 +.. gh-issue: 119311 +.. nonce: 2DBwKR +.. section: Core and Builtins + +Fix bug where names are unexpectedly mangled in the bases of generic +classes. + +.. + +.. date: 2024-05-23-06-34-14 +.. gh-issue: 119395 +.. nonce: z-Hsqb +.. section: Core and Builtins + +Fix bug where names appearing after a generic class are mangled as if they +are in the generic class. + +.. + +.. date: 2024-05-21-11-27-14 +.. gh-issue: 119213 +.. nonce: nxjxrt +.. section: Core and Builtins + +Non-builtin modules built with argument clinic were crashing if used in a +subinterpreter before the main interpreter. The objects that were causing +the problem by leaking between interpreters carelessly have been fixed. + +.. + +.. date: 2024-05-21-09-46-51 +.. gh-issue: 119011 +.. nonce: WOe3bu +.. section: Core and Builtins + +Fixes ``type.__type_params__`` to return an empty tuple instead of a +descriptor. + +.. + +.. date: 2024-05-20-14-57-39 +.. gh-issue: 118692 +.. nonce: Qadm7F +.. section: Core and Builtins + +Avoid creating unnecessary :exc:`StopIteration` instances for monitoring. + +.. + +.. date: 2024-05-16-23-02-03 +.. gh-issue: 119049 +.. nonce: qpd_S- +.. section: Core and Builtins + +Fix displaying the source line for warnings created by the C API if the +:mod:`warnings` module had not yet been imported. + +.. + +.. date: 2024-05-11-21-44-17 +.. gh-issue: 118844 +.. nonce: q2H_km +.. section: Core and Builtins + +Fix build failures when configuring with both ``--disable-gil`` and +``--enable-experimental-jit``. + +.. + +.. date: 2024-05-10-19-54-18 +.. gh-issue: 118921 +.. nonce: O4ztZG +.. section: Core and Builtins + +Add ``copy()`` method for ``FrameLocalsProxy`` which returns a snapshot +``dict`` for local variables. + +.. + +.. date: 2024-05-09-19-47-12 +.. gh-issue: 117657 +.. nonce: Vn0Yey +.. section: Core and Builtins + +Fix data races on the field that stores a pointer to the interpreter's main +thread that occur in free-threaded builds. + +.. + +.. date: 2024-05-08-18-33-07 +.. gh-issue: 118507 +.. nonce: OCQsAY +.. section: Core and Builtins + +Speedup :func:`os.path.isjunction` and :func:`os.path.lexists` on Windows +with a native implementation. + +.. + +.. date: 2024-05-07-16-57-56 +.. gh-issue: 118561 +.. nonce: wNMKVd +.. section: Core and Builtins + +Fix race condition in free-threaded build where :meth:`!list.extend` could +expose uninitialised memory to concurrent readers. + +.. + +.. date: 2024-04-28-19-51-00 +.. gh-issue: 118263 +.. nonce: Gaap3S +.. section: Core and Builtins + +Speed up :func:`os.path.splitroot` & :func:`os.path.normpath` with a direct +C call. + +.. + +.. date: 2024-03-25-15-07-01 +.. gh-issue: 117195 +.. nonce: OWakgD +.. section: Core and Builtins + +Avoid assertion failure for debug builds when calling +``object.__sizeof__(1)`` + +.. + +.. date: 2024-06-04-12-23-01 +.. gh-issue: 119819 +.. nonce: WKKrYh +.. section: Library + +Fix regression to allow logging configuration with multiprocessing queue +types. + +.. + +.. date: 2024-06-03-11-18-16 +.. gh-issue: 117142 +.. nonce: kWTXQo +.. section: Library + +The :mod:`ctypes` module may now be imported in all subinterpreters, +including those that have their own GIL. + +.. + +.. date: 2024-06-02-15-09-17 +.. gh-issue: 118835 +.. nonce: KUAuz6 +.. 
section: Library + +Fix _pyrepl crash when using custom prompt with ANSI escape codes. + +.. + +.. date: 2024-06-01-16-58-43 +.. gh-issue: 117398 +.. nonce: kR0RW7 +.. section: Library + +The ``_datetime`` module (C implementation for :mod:`datetime`) now supports +being imported in multiple interpreters. + +.. + +.. date: 2024-05-30-21-37-05 +.. gh-issue: 89727 +.. nonce: D6S9ig +.. section: Library + +Fix issue with :func:`shutil.rmtree` where a :exc:`RecursionError` is raised +on deep directory trees. + +.. + +.. date: 2024-05-29-20-42-17 +.. gh-issue: 89727 +.. nonce: 5lPTTW +.. section: Library + +Partially fix issue with :func:`shutil.rmtree` where a :exc:`RecursionError` +is raised on deep directory trees. A recursion error is no longer raised +when :data:`!rmtree.avoids_symlink_attacks` is false. + +.. + +.. date: 2024-05-28-12-15-03 +.. gh-issue: 119118 +.. nonce: FMKz1F +.. section: Library + +Fix performance regression in the :mod:`tokenize` module by caching the +``line`` token attribute and calculating the column offset more efficiently. + +.. + +.. date: 2024-05-28-00-56-59 +.. gh-issue: 89727 +.. nonce: _bxoL3 +.. section: Library + +Fix issue with :func:`os.fwalk` where a :exc:`RecursionError` was raised on +deep directory trees by adjusting the implementation to be iterative instead +of recursive. + +.. + +.. date: 2024-05-26-21-28-11 +.. gh-issue: 119588 +.. nonce: wlLBK5 +.. section: Library + +``zipfile.Path.is_symlink`` now assesses if the given path is a symlink. + +.. + +.. date: 2024-05-25-20-15-26 +.. gh-issue: 119555 +.. nonce: mvHbEL +.. section: Library + +Catch :exc:`SyntaxError` from :func:`compile` in the runsource() method of +the InteractiveColoredConsole. Patch by Sergey B Kirpichev. + +.. + +.. date: 2024-05-24-21-54-55 +.. gh-issue: 113892 +.. nonce: JKDFqq +.. section: Library + +Now, the method ``sock_connect`` of :class:`asyncio.ProactorEventLoop` +raises a :exc:`ValueError` if given socket is not in non-blocking mode, as +well as in other loop implementations. + +.. + +.. date: 2024-05-23-22-29-59 +.. gh-issue: 119443 +.. nonce: KAGz6S +.. section: Library + +The interactive REPL no longer runs with ``from __future__ import +annotations`` enabled. Patch by Jelle Zijlstra. + +.. + +.. date: 2024-05-23-11-52-36 +.. gh-issue: 117398 +.. nonce: 2FG1Mk +.. section: Library + +Objects in the datetime C-API are now all statically allocated, which means +better memory safety, especially when the module is reloaded. This should be +transparent to users. + +.. + +.. date: 2024-05-22-21-20-43 +.. gh-issue: 118894 +.. nonce: xHdxR_ +.. section: Library + +:mod:`asyncio` REPL now has the same capabilities as PyREPL. + +.. + +.. date: 2024-05-21-20-13-23 +.. gh-issue: 118911 +.. nonce: iG8nMq +.. section: Library + +In PyREPL, updated ``maybe-accept``'s logic so that if the user hits +:kbd:`Enter` twice, they are able to terminate the block even if there's +trailing whitespace. Also, now when the user hits arrow up, the cursor is on +the last functional line. This matches IPython's behavior. Patch by Aya +Elsayed. + +.. + +.. date: 2024-05-20-20-30-57 +.. gh-issue: 111201 +.. nonce: DAA5lC +.. section: Library + +Remove dependency to :mod:`readline` from the new Python REPL. + +.. + +.. date: 2024-05-19-18-49-04 +.. gh-issue: 119174 +.. nonce: 5GTv7d +.. section: Library + +Fix high DPI causes turtledemo(turtle-graphics examples) windows blurry +Patch by Wulian233 and Terry Jan Reedy + +.. + +.. date: 2024-05-19-13-05-59 +.. gh-issue: 119121 +.. nonce: P1gnh1 +.. 
section: Library + +Fix a NameError happening in ``asyncio.staggered.staggered_race``. This +function is now tested. + +.. + +.. date: 2024-05-17-17-32-12 +.. gh-issue: 119113 +.. nonce: kEv1Ll +.. section: Library + +Fix issue where :meth:`pathlib.PurePath.with_suffix` didn't raise +:exc:`TypeError` when given ``None`` as a suffix. + +.. + +.. date: 2024-05-16-17-31-46 +.. gh-issue: 118643 +.. nonce: hAWH4C +.. section: Library + +Fix an AttributeError in the :mod:`email` module when re-fold a long address +list. Also fix more cases of incorrect encoding of the address separator in +the address list. + +.. + +.. date: 2024-05-12-21-38-42 +.. gh-issue: 58933 +.. nonce: 0kgU2l +.. section: Library + +Make :mod:`pdb` return to caller frame correctly when ``f_trace`` of the +caller frame is not set + +.. + +.. date: 2024-05-10-05-24-32 +.. gh-issue: 118895 +.. nonce: wUm5r2 +.. section: Library + +Setting attributes on :data:`typing.NoDefault` now raises +:exc:`AttributeError` instead of :exc:`TypeError`. + +.. + +.. date: 2024-05-09-21-36-11 +.. gh-issue: 118868 +.. nonce: uckxxP +.. section: Library + +Fixed issue where kwargs were no longer passed to the logging handler +QueueHandler + +.. + +.. date: 2024-05-09-08-46-12 +.. gh-issue: 118851 +.. nonce: aPAoJw +.. section: Library + +``ctx`` arguments to the constructors of :mod:`ast` node classes now default +to :class:`ast.Load() `. Patch by Jelle Zijlstra. + +.. + +.. date: 2024-05-08-21-30-33 +.. gh-issue: 118760 +.. nonce: XvyMHn +.. section: Library + +Restore the default value of ``tkiter.wantobjects`` to ``1``. + +.. + +.. date: 2024-05-08-21-13-56 +.. gh-issue: 118760 +.. nonce: mdmH3T +.. section: Library + +Fix errors in calling Tkinter bindings on Windows. + +.. + +.. date: 2024-05-08-09-21-49 +.. gh-issue: 118772 +.. nonce: c16E8X +.. section: Library + +Allow :class:`typing.TypeVar` instances without a default to follow +instances without a default in some cases. Patch by Jelle Zijlstra. + +.. + +.. date: 2024-05-01-22-24-05 +.. gh-issue: 110863 +.. nonce: GjYBbq +.. section: Library + +:func:`os.path.realpath` now suppresses any :exc:`OSError` from +:func:`os.readlink` when *strict* mode is disabled (the default). + +.. + +.. date: 2024-04-19-14-59-53 +.. gh-issue: 118033 +.. nonce: amS4Gw +.. section: Library + +Fix :func:`dataclasses.dataclass` not creating a ``__weakref__`` slot when +subclassing :class:`typing.Generic`. + +.. + +.. date: 2024-03-19-21-41-31 +.. gh-issue: 106531 +.. nonce: Mgd--6 +.. section: Library + +In :mod:`importlib.resources`, sync with `importlib_resources 6.3.2 +`_, +including: ``MultiplexedPath`` now expects ``Traversable`` paths, +deprecating string arguments to ``MultiplexedPath``; Enabled support for +resources in namespace packages in zip files; Fixed ``NotADirectoryError`` +when calling files on a subdirectory of a namespace package. + +.. + +.. date: 2024-01-12-08-51-03 +.. gh-issue: 113978 +.. nonce: MqTgB0 +.. section: Library + +Ignore warnings on text completion inside REPL. + +.. + +.. date: 2023-04-28-09-54-15 +.. gh-issue: 103956 +.. nonce: EyLDPS +.. section: Library + +Fix lack of newline characters in :mod:`trace` module output when line +tracing is enabled but source code line for current frame is not available. + +.. + +.. date: 2023-04-26-22-24-17 +.. gh-issue: 92081 +.. nonce: V8xMot +.. section: Library + +Fix missing spaces in email headers when the spaces are mixed with encoded +8-bit characters. + +.. + +.. date: 2023-04-24-05-34-23 +.. gh-issue: 103194 +.. 
+
+..
+
+.. date: 2024-05-08-21-30-33
+.. gh-issue: 118760
+.. nonce: XvyMHn
+.. section: Library
+
+Restore the default value of ``tkinter.wantobjects`` to ``1``.
+
+..
+
+.. date: 2024-05-08-21-13-56
+.. gh-issue: 118760
+.. nonce: mdmH3T
+.. section: Library
+
+Fix errors in calling Tkinter bindings on Windows.
+
+..
+
+.. date: 2024-05-08-09-21-49
+.. gh-issue: 118772
+.. nonce: c16E8X
+.. section: Library
+
+Allow :class:`typing.TypeVar` instances without a default to follow
+instances without a default in some cases. Patch by Jelle Zijlstra.
+
+..
+
+.. date: 2024-05-01-22-24-05
+.. gh-issue: 110863
+.. nonce: GjYBbq
+.. section: Library
+
+:func:`os.path.realpath` now suppresses any :exc:`OSError` from
+:func:`os.readlink` when *strict* mode is disabled (the default).
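+
+A rough sketch of the intent, where ``odd_link`` stands for a hypothetical
+symlink whose ``os.readlink()`` call fails with an :exc:`OSError`::
+
+   import os.path
+
+   os.path.realpath("odd_link")               # error suppressed, best-effort result
+   os.path.realpath("odd_link", strict=True)  # the OSError propagates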
+
+..
+
+.. date: 2024-04-19-14-59-53
+.. gh-issue: 118033
+.. nonce: amS4Gw
+.. section: Library
+
+Fix :func:`dataclasses.dataclass` not creating a ``__weakref__`` slot when
+subclassing :class:`typing.Generic`.
+
+..
+
+.. date: 2024-03-19-21-41-31
+.. gh-issue: 106531
+.. nonce: Mgd--6
+.. section: Library
+
+In :mod:`importlib.resources`, sync with `importlib_resources 6.3.2
+`_,
+including: ``MultiplexedPath`` now expects ``Traversable`` paths,
+deprecating string arguments to ``MultiplexedPath``; Enabled support for
+resources in namespace packages in zip files; Fixed ``NotADirectoryError``
+when calling files on a subdirectory of a namespace package.
+
+..
+
+.. date: 2024-01-12-08-51-03
+.. gh-issue: 113978
+.. nonce: MqTgB0
+.. section: Library
+
+Ignore warnings on text completion inside the REPL.
+
+..
+
+.. date: 2023-04-28-09-54-15
+.. gh-issue: 103956
+.. nonce: EyLDPS
+.. section: Library
+
+Fix lack of newline characters in :mod:`trace` module output when line
+tracing is enabled but the source code line for the current frame is not
+available.
+
+..
+
+.. date: 2023-04-26-22-24-17
+.. gh-issue: 92081
+.. nonce: V8xMot
+.. section: Library
+
+Fix missing spaces in email headers when the spaces are mixed with encoded
+8-bit characters.
+
+..
+
+.. date: 2023-04-24-05-34-23
+.. gh-issue: 103194
+.. nonce: GwBwWL
+.. section: Library
+
+Prepare Tkinter for C API changes in Tcl 8.7/9.0 to avoid
+:class:`!_tkinter.Tcl_Obj` being unexpectedly returned instead of
+:class:`bool`, :class:`str`, :class:`bytearray`, or :class:`int`.
+
+..
+
+.. date: 2023-04-10-00-04-37
+.. gh-issue: 87106
+.. nonce: UyBnPQ
+.. section: Library
+
+Fixed handling in :meth:`inspect.Signature.bind` of keyword arguments having
+the same name as positional-only arguments when a variadic keyword argument
+(e.g. ``**kwargs``) is present.
+
+..
+
+.. bpo: 45767
+.. date: 2022-03-10-16-47-57
+.. nonce: ywmyo1
+.. section: Library
+
+Fix integer conversion in :func:`os.major`, :func:`os.minor`, and
+:func:`os.makedev`. Support device numbers larger than ``2**63-1``. Support
+the non-existent device number (``NODEV``).
+
+..
+
+.. date: 2019-08-27-01-16-50
+.. gh-issue: 67693
+.. nonce: 4NIAiy
+.. section: Library
+
+Fix :func:`urllib.parse.urlunparse` and :func:`urllib.parse.urlunsplit` for
+URIs with a path starting with multiple slashes and no authority. Based on a
+patch by Ashwin Ramaswami.
+
+..
+
+.. date: 2024-05-18-10-59-27
+.. gh-issue: 119050
+.. nonce: g4qiH7
+.. section: Tests
+
+regrtest test runner: Add XML support to the refleak checker (-R option).
+Patch by Victor Stinner.
+
+..
+
+.. date: 2024-05-29-17-40-50
+.. gh-issue: 119729
+.. nonce: k0xJ5U
+.. section: Build
+
+On POSIX systems, the pkg-config (``.pc``) filenames now include the ABI
+flags, which may include debug ("d") and free-threaded ("t"). For example:
+
+* ``python-3.14.pc`` (default, non-debug build)
+* ``python-3.14d.pc`` (default, debug build)
+* ``python-3.14t.pc`` (free-threaded build)
+
+..
+
+.. date: 2024-05-19-22-54-55
+.. gh-issue: 115119
+.. nonce: DwMwev
+.. section: Build
+
+Fall back to the bundled libmpdec if a system version cannot be found.
+
+..
+
+.. date: 2024-05-17-19-53-27
+.. gh-issue: 119132
+.. nonce: wepPgM
+.. section: Build
+
+Update :data:`sys.version` to identify whether the build is the default
+build or the free-threading build. Patch by Donghee Na.
+
+..
+
+.. date: 2024-05-13-15-57-58
+.. gh-issue: 118836
+.. nonce: 7yN1iB
+.. section: Build
+
+Fix an ``AssertionError`` when building with ``--enable-experimental-jit``
+and the compiler emits a ``SHT_NOTE`` section.
+
+..
+
+.. date: 2024-05-11-15-11-30
+.. gh-issue: 118943
+.. nonce: VI_MnY
+.. section: Build
+
+Fix a possible race condition affecting parallel builds configured with
+``--enable-experimental-jit``, in which compilation errors could be caused
+by an incompletely-generated header file.
+
+..
+
+.. date: 2024-05-30-17-39-25
+.. gh-issue: 119679
+.. nonce: mZC87w
+.. section: Windows
+
+Ensures correct import libraries are included in Windows installs.
+
+..
+
+.. date: 2024-05-29-11-06-12
+.. gh-issue: 119690
+.. nonce: 8q6e1p
+.. section: Windows
+
+Adds Unicode support and fixes audit events for ``_winapi.CreateNamedPipe``.
+
+..
+
+.. date: 2024-05-25-18-43-10
+.. gh-issue: 111201
+.. nonce: SLPJIx
+.. section: Windows
+
+Add support for the new PyREPL on Windows.
+
+..
+
+.. date: 2024-05-22-19-43-29
+.. gh-issue: 119070
+.. nonce: _enton
+.. section: Windows
+
+Fixes ``py.exe`` handling of shebangs like ``/usr/bin/env python3.12``,
+which were previously interpreted as ``python3.exe`` instead of
+``python3.12.exe``.
+
+..
+
+.. date: 2024-04-24-22-50-33
+.. gh-issue: 117505
+.. nonce: gcTb_p
+.. section: Windows
+
+Fixes an issue with the Windows installer not running ensurepip in a fully
+isolated environment. This could cause unexpected interactions with the user
+site-packages.
+
+..
+
+.. date: 2024-04-24-05-16-32
+.. gh-issue: 118209
+.. nonce: Ryyzlz
+.. section: Windows
+
+Avoid crashing in :mod:`mmap` on Windows when the mapped memory is
+inaccessible due to file system errors or access violations.
+
+..
+
+.. date: 2024-03-19-19-04-56
+.. gh-issue: 116145
+.. nonce: srVT3d
+.. section: Windows
+
+Updated bundled Tcl/Tk to 8.6.14.
+
+..
+
+.. date: 2024-05-29-21-05-59
+.. gh-issue: 119585
+.. nonce: Sn7JL3
+.. section: C API
+
+Fix a crash when a thread state created by :c:func:`PyGILState_Ensure` runs
+a destructor, during :c:func:`PyThreadState_Clear`, that calls back into
+:c:func:`PyGILState_Ensure` and :c:func:`PyGILState_Release`. This might
+occur in the free-threaded build or when using thread-local variables whose
+destructors call :c:func:`PyGILState_Ensure`.
+
+..
+
+.. date: 2024-05-22-17-50-48
+.. gh-issue: 119336
+.. nonce: ff3qnS
+.. section: C API
+
+Restore the removed ``_PyLong_NumBits()`` function. It is used by the
+pywin32 project. Patch by Ethan Smith.
+
+..
+
+.. date: 2024-05-21-11-35-11
+.. gh-issue: 119247
+.. nonce: U6n6mh
+.. section: C API
+
+Added ``Py_BEGIN_CRITICAL_SECTION_SEQUENCE_FAST`` and
+``Py_END_CRITICAL_SECTION_SEQUENCE_FAST`` macros to make it possible to use
+the ``PySequence_Fast`` APIs safely in the free-threaded build, and use them
+to make ``str.join`` work without the GIL.
+
+..
+
+.. date: 2024-05-20-10-35-22
+.. gh-issue: 111389
+.. nonce: a6axBk
+.. section: C API
+
+Add the :c:macro:`PyHASH_MULTIPLIER` constant: the prime multiplier used in
+string and various other hashes. Patch by Victor Stinner.
+
+..
+
+.. date: 2024-05-08-23-14-06
+.. gh-issue: 116984
+.. nonce: 5sgcDo
+.. section: C API
+
+Make mimalloc includes relative to the current file to avoid embedders or
+extensions needing to include ``Internal/mimalloc`` if they are already
+including internal CPython headers.
+
+..
+
+.. date: 2024-05-08-20-13-00
+.. gh-issue: 118789
+.. nonce: m88uUa
+.. section: C API
+
+Restore ``_PyWeakref_ClearRef`` that was previously removed in Python 3.13
+alpha 1.
diff --git a/Misc/NEWS.d/3.8.0a1.rst b/Misc/NEWS.d/3.8.0a1.rst
index 1964a8329979f5..63735c2c9726d1 100644
--- a/Misc/NEWS.d/3.8.0a1.rst
+++ b/Misc/NEWS.d/3.8.0a1.rst
@@ -6282,7 +6282,7 @@ Add documentation about the new command line interface of the gzip module.
 .. nonce: YO9CYm
 .. section: Documentation
 
-chm document displays non-ASCII charaters properly on some MBCS Windows
+chm document displays non-ASCII characters properly on some MBCS Windows
 systems.
 
 ..
diff --git a/Misc/NEWS.d/3.8.0a4.rst b/Misc/NEWS.d/3.8.0a4.rst
index 38fa1324dceb40..7bf0de1210935b 100644
--- a/Misc/NEWS.d/3.8.0a4.rst
+++ b/Misc/NEWS.d/3.8.0a4.rst
@@ -600,7 +600,7 @@ exceptions.
 .. nonce: 9sjd38
 .. section: Library
 
-Add time module support and fix test_time faiures for VxWorks.
+Add time module support and fix test_time failures for VxWorks.
 
 ..
 
@@ -843,7 +843,7 @@ Using the code of the ``Tools/scripts/serve.py`` script as an example in the
 .. nonce: nF1pP1
 .. section: Documentation
 
-Added Documention for PyInterpreterState_Main().
+Added documentation for PyInterpreterState_Main().
 
 ..
diff --git a/Misc/NEWS.d/3.9.0a1.rst b/Misc/NEWS.d/3.9.0a1.rst
index 39d760cdd4fddf..a38b93e4b76d17 100644
--- a/Misc/NEWS.d/3.9.0a1.rst
+++ b/Misc/NEWS.d/3.9.0a1.rst
@@ -1396,7 +1396,7 @@ way to :func:`email.message.get`.
 ..
section: Library Deprecated the ``split()`` method in :class:`_tkinter.TkappType` in favour -of the ``splitlist()`` method which has more consistent and predicable +of the ``splitlist()`` method which has more consistent and predictable behavior. .. diff --git a/Misc/NEWS.d/3.9.0a6.rst b/Misc/NEWS.d/3.9.0a6.rst index 466ff624fcbf81..b7ea1051c314f2 100644 --- a/Misc/NEWS.d/3.9.0a6.rst +++ b/Misc/NEWS.d/3.9.0a6.rst @@ -635,7 +635,7 @@ script is killed by signal 11, it now logs: "CGI script exit code -11." .. section: Library Improve the error message when triying to import a module using :mod:`runpy` -and incorrently use the ".py" extension at the end of the module name. Patch +and incorrectly using the ".py" extension at the end of the module name. Patch by Pablo Galindo. .. diff --git a/Misc/externals.spdx.json b/Misc/externals.spdx.json index 58f8e0afd71f1b..758d41910054ce 100644 --- a/Misc/externals.spdx.json +++ b/Misc/externals.spdx.json @@ -112,42 +112,42 @@ "checksums": [ { "algorithm": "SHA256", - "checksumValue": "1d3f2015e49e269cf681373d433cd54d88d5ef7443fe87f5f50f5fcfe9003e73" + "checksumValue": "ad7623a44e1b6e42df47ba8f16b2b0435ac605650b5054077c4355a30473074c" } ], - "downloadLocation": "https://github.com/python/cpython-source-deps/archive/refs/tags/tcl-core-8.6.13.1.tar.gz", + "downloadLocation": "https://github.com/python/cpython-source-deps/archive/refs/tags/tcl-core-8.6.14.0.tar.gz", "externalRefs": [ { "referenceCategory": "SECURITY", - "referenceLocator": "cpe:2.3:a:tcl_tk:tcl_tk:8.6.13.1:*:*:*:*:*:*:*", + "referenceLocator": "cpe:2.3:a:tcl_tk:tcl_tk:8.6.14.0:*:*:*:*:*:*:*", "referenceType": "cpe23Type" } ], "licenseConcluded": "NOASSERTION", "name": "tcl-core", "primaryPackagePurpose": "SOURCE", - "versionInfo": "8.6.13.1" + "versionInfo": "8.6.14.0" }, { "SPDXID": "SPDXRef-PACKAGE-tk", "checksums": [ { "algorithm": "SHA256", - "checksumValue": "6056203b8a6aaf6ea89d90a7b55dc7f407e55c093f731a98fd830a712a3c81d3" + "checksumValue": "e8d5cbe97952037962518b69aba85e324d80aa189054c163ab0ee764a448e802" } ], - "downloadLocation": "https://github.com/python/cpython-source-deps/archive/refs/tags/tk-8.6.13.1.tar.gz", + "downloadLocation": "https://github.com/python/cpython-source-deps/archive/refs/tags/tk-8.6.14.0.tar.gz", "externalRefs": [ { "referenceCategory": "SECURITY", - "referenceLocator": "cpe:2.3:a:tcl_tk:tcl_tk:8.6.13.1:*:*:*:*:*:*:*", + "referenceLocator": "cpe:2.3:a:tcl_tk:tcl_tk:8.6.14.0:*:*:*:*:*:*:*", "referenceType": "cpe23Type" } ], "licenseConcluded": "NOASSERTION", "name": "tk", "primaryPackagePurpose": "SOURCE", - "versionInfo": "8.6.13.1" + "versionInfo": "8.6.14.0" }, { "SPDXID": "SPDXRef-PACKAGE-xz", diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c index 574cb8014493c4..1d9534671a4ee8 100644 --- a/Modules/_ctypes/_ctypes.c +++ b/Modules/_ctypes/_ctypes.c @@ -454,20 +454,17 @@ class _ctypes.CType_Type "PyObject *" "clinic_state()->CType_Type" static int CType_Type_traverse(PyObject *self, visitproc visit, void *arg) { - ctypes_state *st = get_module_state_by_def_final(Py_TYPE(self)); - if (st && st->PyCType_Type) { - StgInfo *info; - if (PyStgInfo_FromType(st, self, &info) < 0) { - PyErr_WriteUnraisable(self); - } - if (info) { - Py_VISIT(info->proto); - Py_VISIT(info->argtypes); - Py_VISIT(info->converters); - Py_VISIT(info->restype); - Py_VISIT(info->checker); - Py_VISIT(info->module); - } + StgInfo *info = _PyStgInfo_FromType_NoState(self); + if (!info) { + PyErr_WriteUnraisable(self); + } + if (info) { + 
Py_VISIT(info->proto); + Py_VISIT(info->argtypes); + Py_VISIT(info->converters); + Py_VISIT(info->restype); + Py_VISIT(info->checker); + Py_VISIT(info->module); } Py_VISIT(Py_TYPE(self)); return PyType_Type.tp_traverse(self, visit, arg); @@ -488,15 +485,12 @@ ctype_clear_stginfo(StgInfo *info) static int CType_Type_clear(PyObject *self) { - ctypes_state *st = get_module_state_by_def_final(Py_TYPE(self)); - if (st && st->PyCType_Type) { - StgInfo *info; - if (PyStgInfo_FromType(st, self, &info) < 0) { - PyErr_WriteUnraisable(self); - } - if (info) { - ctype_clear_stginfo(info); - } + StgInfo *info = _PyStgInfo_FromType_NoState(self); + if (!info) { + PyErr_WriteUnraisable(self); + } + if (info) { + ctype_clear_stginfo(info); } return PyType_Type.tp_clear(self); } @@ -504,22 +498,20 @@ CType_Type_clear(PyObject *self) static void CType_Type_dealloc(PyObject *self) { - ctypes_state *st = get_module_state_by_def_final(Py_TYPE(self)); - if (st && st->PyCType_Type) { - StgInfo *info; - if (PyStgInfo_FromType(st, self, &info) < 0) { - PyErr_WriteUnraisable(self); - } - if (info) { - PyMem_Free(info->ffi_type_pointer.elements); - info->ffi_type_pointer.elements = NULL; - PyMem_Free(info->format); - info->format = NULL; - PyMem_Free(info->shape); - info->shape = NULL; - ctype_clear_stginfo(info); - } + StgInfo *info = _PyStgInfo_FromType_NoState(self); + if (!info) { + PyErr_WriteUnraisable(self); + } + if (info) { + PyMem_Free(info->ffi_type_pointer.elements); + info->ffi_type_pointer.elements = NULL; + PyMem_Free(info->format); + info->format = NULL; + PyMem_Free(info->shape); + info->shape = NULL; + ctype_clear_stginfo(info); } + PyTypeObject *tp = Py_TYPE(self); PyType_Type.tp_dealloc(self); Py_DECREF(tp); @@ -5947,7 +5939,7 @@ module_free(void *module) static PyModuleDef_Slot module_slots[] = { {Py_mod_exec, _ctypes_mod_exec}, - {Py_mod_multiple_interpreters, Py_MOD_MULTIPLE_INTERPRETERS_NOT_SUPPORTED}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {Py_mod_gil, Py_MOD_GIL_NOT_USED}, {0, NULL} }; diff --git a/Modules/_ctypes/clinic/_ctypes.c.h b/Modules/_ctypes/clinic/_ctypes.c.h index 98a84cc14f4386..e1d5a17cbe7d68 100644 --- a/Modules/_ctypes/clinic/_ctypes.c.h +++ b/Modules/_ctypes/clinic/_ctypes.c.h @@ -2,6 +2,9 @@ preserve [clinic start generated code]*/ +#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) +# include "pycore_runtime.h" // _Py_SINGLETON() +#endif #include "pycore_abstract.h" // _PyNumber_Index() #include "pycore_modsupport.h" // _PyArg_UnpackKeywords() @@ -607,4 +610,4 @@ Simple_from_outparm(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py } return Simple_from_outparm_impl(self, cls); } -/*[clinic end generated code: output=9c6539a3559e6088 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=a90886be2a294ee6 input=a9049054013a1b77]*/ diff --git a/Modules/_ctypes/ctypes.h b/Modules/_ctypes/ctypes.h index 20c68134be2804..d46675ae996b43 100644 --- a/Modules/_ctypes/ctypes.h +++ b/Modules/_ctypes/ctypes.h @@ -101,20 +101,6 @@ get_module_state_by_def(PyTypeObject *cls) return get_module_state(mod); } -static inline ctypes_state * -get_module_state_by_def_final(PyTypeObject *cls) -{ - if (cls->tp_mro == NULL) { - return NULL; - } - PyObject *mod = PyType_GetModuleByDef(cls, &_ctypesmodule); - if (mod == NULL) { - PyErr_Clear(); - return NULL; - } - return get_module_state(mod); -} - extern PyType_Spec carg_spec; extern PyType_Spec cfield_spec; @@ -508,6 +494,20 @@ PyStgInfo_FromAny(ctypes_state *state, PyObject *obj, 
StgInfo **result) return _stginfo_from_type(state, Py_TYPE(obj), result); } +/* A variant of PyStgInfo_FromType that doesn't need the state, + * so it can be called from finalization functions when the module + * state is torn down. Does no checks; cannot fail. + * This inlines the current implementation PyObject_GetTypeData, + * so it might break in the future. + */ +static inline StgInfo * +_PyStgInfo_FromType_NoState(PyObject *type) +{ + size_t type_basicsize =_Py_SIZE_ROUND_UP(PyType_Type.tp_basicsize, + ALIGNOF_MAX_ALIGN_T); + return (StgInfo *)((char *)type + type_basicsize); +} + // Initialize StgInfo on a newly created type static inline StgInfo * PyStgInfo_Init(ctypes_state *state, PyTypeObject *type) diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c index 8164715a66ff09..d6fa273c75e15e 100644 --- a/Modules/_datetimemodule.c +++ b/Modules/_datetimemodule.c @@ -25,24 +25,20 @@ # include /* struct timeval */ #endif -#define PyDate_Check(op) PyObject_TypeCheck(op, &PyDateTime_DateType) -#define PyDate_CheckExact(op) Py_IS_TYPE(op, &PyDateTime_DateType) -#define PyDateTime_Check(op) PyObject_TypeCheck(op, &PyDateTime_DateTimeType) -#define PyDateTime_CheckExact(op) Py_IS_TYPE(op, &PyDateTime_DateTimeType) - -#define PyTime_Check(op) PyObject_TypeCheck(op, &PyDateTime_TimeType) -#define PyTime_CheckExact(op) Py_IS_TYPE(op, &PyDateTime_TimeType) - -#define PyDelta_Check(op) PyObject_TypeCheck(op, &PyDateTime_DeltaType) -#define PyDelta_CheckExact(op) Py_IS_TYPE(op, &PyDateTime_DeltaType) - -#define PyTZInfo_Check(op) PyObject_TypeCheck(op, &PyDateTime_TZInfoType) -#define PyTZInfo_CheckExact(op) Py_IS_TYPE(op, &PyDateTime_TZInfoType) +/* forward declarations */ +static PyTypeObject PyDateTime_DateType; +static PyTypeObject PyDateTime_DateTimeType; +static PyTypeObject PyDateTime_TimeType; +static PyTypeObject PyDateTime_DeltaType; +static PyTypeObject PyDateTime_TZInfoType; +static PyTypeObject PyDateTime_TimeZoneType; -#define PyTimezone_Check(op) PyObject_TypeCheck(op, &PyDateTime_TimeZoneType) typedef struct { + /* Module heap types. */ + PyTypeObject *isocalendar_date_type; + /* Conversion factors. */ PyObject *us_per_ms; // 1_000 PyObject *us_per_second; // 1_000_000 @@ -52,16 +48,192 @@ typedef struct { PyObject *us_per_week; // 1e6 * 3600 * 24 * 7 as Python int PyObject *seconds_per_day; // 3600 * 24 as Python int - /* The interned UTC timezone instance */ - PyObject *utc; - /* The interned Unix epoch datetime instance */ PyObject *epoch; } datetime_state; -static datetime_state _datetime_global_state; +/* The module has a fixed number of static objects, due to being exposed + * through the datetime C-API. There are five types exposed directly, + * one type exposed indirectly, and one singleton constant (UTC). + * + * Each of these objects is hidden behind a macro in the same way as + * the per-module objects stored in module state. The macros for the + * static objects don't need to be passed a state, but the consistency + * of doing so is more clear. We use a dedicated noop macro, NO_STATE, + * to make the special case obvious. 
*/ + +#define NO_STATE NULL + +#define DATE_TYPE(st) &PyDateTime_DateType +#define DATETIME_TYPE(st) &PyDateTime_DateTimeType +#define TIME_TYPE(st) &PyDateTime_TimeType +#define DELTA_TYPE(st) &PyDateTime_DeltaType +#define TZINFO_TYPE(st) &PyDateTime_TZInfoType +#define TIMEZONE_TYPE(st) &PyDateTime_TimeZoneType +#define ISOCALENDAR_DATE_TYPE(st) st->isocalendar_date_type + +#define PyDate_Check(op) PyObject_TypeCheck(op, DATE_TYPE(NO_STATE)) +#define PyDate_CheckExact(op) Py_IS_TYPE(op, DATE_TYPE(NO_STATE)) + +#define PyDateTime_Check(op) PyObject_TypeCheck(op, DATETIME_TYPE(NO_STATE)) +#define PyDateTime_CheckExact(op) Py_IS_TYPE(op, DATETIME_TYPE(NO_STATE)) + +#define PyTime_Check(op) PyObject_TypeCheck(op, TIME_TYPE(NO_STATE)) +#define PyTime_CheckExact(op) Py_IS_TYPE(op, TIME_TYPE(NO_STATE)) + +#define PyDelta_Check(op) PyObject_TypeCheck(op, DELTA_TYPE(NO_STATE)) +#define PyDelta_CheckExact(op) Py_IS_TYPE(op, DELTA_TYPE(NO_STATE)) + +#define PyTZInfo_Check(op) PyObject_TypeCheck(op, TZINFO_TYPE(NO_STATE)) +#define PyTZInfo_CheckExact(op) Py_IS_TYPE(op, TZINFO_TYPE(NO_STATE)) + +#define PyTimezone_Check(op) PyObject_TypeCheck(op, TIMEZONE_TYPE(NO_STATE)) + +#define CONST_US_PER_MS(st) st->us_per_ms +#define CONST_US_PER_SECOND(st) st->us_per_second +#define CONST_US_PER_MINUTE(st) st->us_per_minute +#define CONST_US_PER_HOUR(st) st->us_per_hour +#define CONST_US_PER_DAY(st) st->us_per_day +#define CONST_US_PER_WEEK(st) st->us_per_week +#define CONST_SEC_PER_DAY(st) st->seconds_per_day +#define CONST_EPOCH(st) st->epoch +#define CONST_UTC(st) ((PyObject *)&utc_timezone) + +static datetime_state * +get_module_state(PyObject *module) +{ + void *state = _PyModule_GetState(module); + assert(state != NULL); + return (datetime_state *)state; +} + + +#define INTERP_KEY ((PyObject *)&_Py_ID(cached_datetime_module)) + +static PyObject * +get_current_module(PyInterpreterState *interp, int *p_reloading) +{ + PyObject *mod = NULL; + int reloading = 0; + + PyObject *dict = PyInterpreterState_GetDict(interp); + if (dict == NULL) { + goto error; + } + PyObject *ref = NULL; + if (PyDict_GetItemRef(dict, INTERP_KEY, &ref) < 0) { + goto error; + } + if (ref != NULL) { + reloading = 1; + if (ref != Py_None) { + (void)PyWeakref_GetRef(ref, &mod); + if (mod == Py_None) { + Py_CLEAR(mod); + } + Py_DECREF(ref); + } + } + if (p_reloading != NULL) { + *p_reloading = reloading; + } + return mod; + +error: + assert(PyErr_Occurred()); + return NULL; +} + +static PyModuleDef datetimemodule; + +static datetime_state * +_get_current_state(PyObject **p_mod) +{ + PyInterpreterState *interp = PyInterpreterState_Get(); + PyObject *mod = get_current_module(interp, NULL); + if (mod == NULL) { + assert(!PyErr_Occurred()); + if (PyErr_Occurred()) { + return NULL; + } + /* The static types can outlive the module, + * so we must re-import the module. 
*/ + mod = PyImport_ImportModule("_datetime"); + if (mod == NULL) { + return NULL; + } + } + datetime_state *st = get_module_state(mod); + *p_mod = mod; + return st; +} + +#define GET_CURRENT_STATE(MOD_VAR) \ + _get_current_state(&MOD_VAR) +#define RELEASE_CURRENT_STATE(ST_VAR, MOD_VAR) \ + Py_DECREF(MOD_VAR) + +static int +set_current_module(PyInterpreterState *interp, PyObject *mod) +{ + assert(mod != NULL); + PyObject *dict = PyInterpreterState_GetDict(interp); + if (dict == NULL) { + return -1; + } + PyObject *ref = PyWeakref_NewRef(mod, NULL); + if (ref == NULL) { + return -1; + } + int rc = PyDict_SetItem(dict, INTERP_KEY, ref); + Py_DECREF(ref); + return rc; +} + +static void +clear_current_module(PyInterpreterState *interp, PyObject *expected) +{ + PyObject *exc = PyErr_GetRaisedException(); + + PyObject *dict = PyInterpreterState_GetDict(interp); + if (dict == NULL) { + goto error; + } + + if (expected != NULL) { + PyObject *ref = NULL; + if (PyDict_GetItemRef(dict, INTERP_KEY, &ref) < 0) { + goto error; + } + if (ref != NULL) { + PyObject *current = NULL; + int rc = PyWeakref_GetRef(ref, ¤t); + /* We only need "current" for pointer comparison. */ + Py_XDECREF(current); + Py_DECREF(ref); + if (rc < 0) { + goto error; + } + if (current != expected) { + goto finally; + } + } + } + + /* We use None to identify that the module was previously loaded. */ + if (PyDict_SetItem(dict, INTERP_KEY, Py_None) < 0) { + goto error; + } + + goto finally; + +error: + PyErr_WriteUnraisable(NULL); + +finally: + PyErr_SetRaisedException(exc); +} -#define STATIC_STATE() (&_datetime_global_state) /* We require that C int be at least 32 bits, and use int virtually * everywhere. In just a few cases we use a temp long, where a Python @@ -142,25 +314,16 @@ static datetime_state _datetime_global_state; */ #define MONTH_IS_SANE(M) ((unsigned int)(M) - 1 < 12) -/* Forward declarations. 
*/ -static PyTypeObject PyDateTime_DateType; -static PyTypeObject PyDateTime_DateTimeType; -static PyTypeObject PyDateTime_DeltaType; -static PyTypeObject PyDateTime_IsoCalendarDateType; -static PyTypeObject PyDateTime_TimeType; -static PyTypeObject PyDateTime_TZInfoType; -static PyTypeObject PyDateTime_TimeZoneType; - static int check_tzinfo_subclass(PyObject *p); /*[clinic input] module datetime -class datetime.datetime "PyDateTime_DateTime *" "&PyDateTime_DateTimeType" -class datetime.date "PyDateTime_Date *" "&PyDateTime_DateType" -class datetime.time "PyDateTime_Time *" "&PyDateTime_TimeType" -class datetime.IsoCalendarDate "PyDateTime_IsoCalendarDate *" "&PyDateTime_IsoCalendarDateType" +class datetime.datetime "PyDateTime_DateTime *" "get_datetime_state()->datetime_type" +class datetime.date "PyDateTime_Date *" "get_datetime_state()->date_type" +class datetime.time "PyDateTime_Time *" "get_datetime_state()->time_type" +class datetime.IsoCalendarDate "PyDateTime_IsoCalendarDate *" "get_datetime_state()->isocalendar_date_type" [clinic start generated code]*/ -/*[clinic end generated code: output=da39a3ee5e6b4b0d input=6f65a48dd22fa40f]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=c8f3d834a860d50a]*/ #include "clinic/_datetimemodule.c.h" @@ -979,7 +1142,7 @@ new_date_ex(int year, int month, int day, PyTypeObject *type) } #define new_date(year, month, day) \ - new_date_ex(year, month, day, &PyDateTime_DateType) + new_date_ex(year, month, day, DATE_TYPE(NO_STATE)) // Forward declaration static PyObject * @@ -991,10 +1154,10 @@ new_date_subclass_ex(int year, int month, int day, PyObject *cls) { PyObject *result; // We have "fast path" constructors for two subclasses: date and datetime - if ((PyTypeObject *)cls == &PyDateTime_DateType) { + if ((PyTypeObject *)cls == DATE_TYPE(NO_STATE)) { result = new_date_ex(year, month, day, (PyTypeObject *)cls); } - else if ((PyTypeObject *)cls == &PyDateTime_DateTimeType) { + else if ((PyTypeObject *)cls == DATETIME_TYPE(NO_STATE)) { result = new_datetime_ex(year, month, day, 0, 0, 0, 0, Py_None, (PyTypeObject *)cls); } @@ -1048,8 +1211,7 @@ new_datetime_ex(int year, int month, int day, int hour, int minute, } #define new_datetime(y, m, d, hh, mm, ss, us, tzinfo, fold) \ - new_datetime_ex2(y, m, d, hh, mm, ss, us, tzinfo, fold, \ - &PyDateTime_DateTimeType) + new_datetime_ex2(y, m, d, hh, mm, ss, us, tzinfo, fold, DATETIME_TYPE(NO_STATE)) static PyObject * call_subclass_fold(PyObject *cls, int fold, const char *format, ...) @@ -1088,9 +1250,10 @@ call_subclass_fold(PyObject *cls, int fold, const char *format, ...) 
static PyObject * new_datetime_subclass_fold_ex(int year, int month, int day, int hour, int minute, int second, int usecond, PyObject *tzinfo, - int fold, PyObject *cls) { + int fold, PyObject *cls) +{ PyObject* dt; - if ((PyTypeObject*)cls == &PyDateTime_DateTimeType) { + if ((PyTypeObject*)cls == DATETIME_TYPE(NO_STATE)) { // Use the fast path constructor dt = new_datetime(year, month, day, hour, minute, second, usecond, tzinfo, fold); @@ -1151,15 +1314,15 @@ new_time_ex(int hour, int minute, int second, int usecond, return new_time_ex2(hour, minute, second, usecond, tzinfo, 0, type); } -#define new_time(hh, mm, ss, us, tzinfo, fold) \ - new_time_ex2(hh, mm, ss, us, tzinfo, fold, &PyDateTime_TimeType) +#define new_time(hh, mm, ss, us, tzinfo, fold) \ + new_time_ex2(hh, mm, ss, us, tzinfo, fold, TIME_TYPE(NO_STATE)) static PyObject * new_time_subclass_fold_ex(int hour, int minute, int second, int usecond, PyObject *tzinfo, int fold, PyObject *cls) { PyObject *t; - if ((PyTypeObject*)cls == &PyDateTime_TimeType) { + if ((PyTypeObject*)cls == TIME_TYPE(NO_STATE)) { // Use the fast path constructor t = new_time(hour, minute, second, usecond, tzinfo, fold); } @@ -1172,6 +1335,8 @@ new_time_subclass_fold_ex(int hour, int minute, int second, int usecond, return t; } +static PyDateTime_Delta * look_up_delta(int, int, int, PyTypeObject *); + /* Create a timedelta instance. Normalize the members iff normalize is * true. Passing false is a speed optimization, if you know for sure * that seconds and microseconds are already in their proper ranges. In any @@ -1192,6 +1357,12 @@ new_delta_ex(int days, int seconds, int microseconds, int normalize, if (check_delta_day_range(days) < 0) return NULL; + self = look_up_delta(days, seconds, microseconds, type); + if (self != NULL) { + return (PyObject *)self; + } + assert(!PyErr_Occurred()); + self = (PyDateTime_Delta *) (type->tp_alloc(type, 0)); if (self != NULL) { self->hashcode = -1; @@ -1203,7 +1374,7 @@ new_delta_ex(int days, int seconds, int microseconds, int normalize, } #define new_delta(d, s, us, normalize) \ - new_delta_ex(d, s, us, normalize, &PyDateTime_DeltaType) + new_delta_ex(d, s, us, normalize, DELTA_TYPE(NO_STATE)) typedef struct @@ -1213,6 +1384,8 @@ typedef struct PyObject *name; } PyDateTime_TimeZone; +static PyDateTime_TimeZone * look_up_timezone(PyObject *offset, PyObject *name); + /* Create new timezone instance checking offset range. This function does not check the name argument. 
Caller must assure that offset is a timedelta instance and name is either NULL @@ -1221,12 +1394,18 @@ static PyObject * create_timezone(PyObject *offset, PyObject *name) { PyDateTime_TimeZone *self; - PyTypeObject *type = &PyDateTime_TimeZoneType; + PyTypeObject *type = TIMEZONE_TYPE(NO_STATE); assert(offset != NULL); assert(PyDelta_Check(offset)); assert(name == NULL || PyUnicode_Check(name)); + self = look_up_timezone(offset, name); + if (self != NULL) { + return (PyObject *)self; + } + assert(!PyErr_Occurred()); + self = (PyDateTime_TimeZone *)(type->tp_alloc(type, 0)); if (self == NULL) { return NULL; @@ -1237,6 +1416,7 @@ create_timezone(PyObject *offset, PyObject *name) } static int delta_bool(PyDateTime_Delta *self); +static PyDateTime_TimeZone utc_timezone; static PyObject * new_timezone(PyObject *offset, PyObject *name) @@ -1246,8 +1426,7 @@ new_timezone(PyObject *offset, PyObject *name) assert(name == NULL || PyUnicode_Check(name)); if (name == NULL && delta_bool((PyDateTime_Delta *)offset) == 0) { - datetime_state *st = STATIC_STATE(); - return Py_NewRef(st->utc); + return Py_NewRef(CONST_UTC(NO_STATE)); } if ((GET_TD_DAYS(offset) == -1 && GET_TD_SECONDS(offset) == 0 && @@ -1460,8 +1639,7 @@ tzinfo_from_isoformat_results(int rv, int tzoffset, int tz_useconds) if (rv == 1) { // Create a timezone from offset in seconds (0 returns UTC) if (tzoffset == 0) { - datetime_state *st = STATIC_STATE(); - return Py_NewRef(st->utc); + return Py_NewRef(CONST_UTC(NO_STATE)); } PyObject *delta = new_delta(0, tzoffset, tz_useconds, 1); @@ -1890,11 +2068,13 @@ delta_to_microseconds(PyDateTime_Delta *self) PyObject *x3 = NULL; PyObject *result = NULL; + PyObject *current_mod = NULL; + datetime_state *st = GET_CURRENT_STATE(current_mod); + x1 = PyLong_FromLong(GET_TD_DAYS(self)); if (x1 == NULL) goto Done; - datetime_state *st = STATIC_STATE(); - x2 = PyNumber_Multiply(x1, st->seconds_per_day); /* days in seconds */ + x2 = PyNumber_Multiply(x1, CONST_SEC_PER_DAY(st)); /* days in seconds */ if (x2 == NULL) goto Done; Py_SETREF(x1, NULL); @@ -1911,7 +2091,7 @@ delta_to_microseconds(PyDateTime_Delta *self) /* x1 = */ x2 = NULL; /* x3 has days+seconds in seconds */ - x1 = PyNumber_Multiply(x3, st->us_per_second); /* us */ + x1 = PyNumber_Multiply(x3, CONST_US_PER_SECOND(st)); /* us */ if (x1 == NULL) goto Done; Py_SETREF(x3, NULL); @@ -1927,6 +2107,7 @@ delta_to_microseconds(PyDateTime_Delta *self) Py_XDECREF(x1); Py_XDECREF(x2); Py_XDECREF(x3); + RELEASE_CURRENT_STATE(st, current_mod); return result; } @@ -1966,8 +2147,10 @@ microseconds_to_delta_ex(PyObject *pyus, PyTypeObject *type) PyObject *num = NULL; PyObject *result = NULL; - datetime_state *st = STATIC_STATE(); - tuple = checked_divmod(pyus, st->us_per_second); + PyObject *current_mod = NULL; + datetime_state *st = GET_CURRENT_STATE(current_mod); + + tuple = checked_divmod(pyus, CONST_US_PER_SECOND(st)); if (tuple == NULL) { goto Done; } @@ -1985,7 +2168,7 @@ microseconds_to_delta_ex(PyObject *pyus, PyTypeObject *type) num = Py_NewRef(PyTuple_GET_ITEM(tuple, 0)); /* leftover seconds */ Py_DECREF(tuple); - tuple = checked_divmod(num, st->seconds_per_day); + tuple = checked_divmod(num, CONST_SEC_PER_DAY(st)); if (tuple == NULL) goto Done; Py_DECREF(num); @@ -2010,6 +2193,7 @@ microseconds_to_delta_ex(PyObject *pyus, PyTypeObject *type) Done: Py_XDECREF(tuple); Py_XDECREF(num); + RELEASE_CURRENT_STATE(st, current_mod); return result; BadDivmod: @@ -2019,7 +2203,7 @@ microseconds_to_delta_ex(PyObject *pyus, PyTypeObject *type) } #define 
microseconds_to_delta(pymicros) \ - microseconds_to_delta_ex(pymicros, &PyDateTime_DeltaType) + microseconds_to_delta_ex(pymicros, DELTA_TYPE(NO_STATE)) static PyObject * multiply_int_timedelta(PyObject *intobj, PyDateTime_Delta *delta) @@ -2547,6 +2731,9 @@ delta_new(PyTypeObject *type, PyObject *args, PyObject *kw) { PyObject *self = NULL; + PyObject *current_mod = NULL; + datetime_state *st = GET_CURRENT_STATE(current_mod); + /* Argument objects. */ PyObject *day = NULL; PyObject *second = NULL; @@ -2585,29 +2772,28 @@ delta_new(PyTypeObject *type, PyObject *args, PyObject *kw) y = accum("microseconds", x, us, _PyLong_GetOne(), &leftover_us); CLEANUP; } - datetime_state *st = STATIC_STATE(); if (ms) { - y = accum("milliseconds", x, ms, st->us_per_ms, &leftover_us); + y = accum("milliseconds", x, ms, CONST_US_PER_MS(st), &leftover_us); CLEANUP; } if (second) { - y = accum("seconds", x, second, st->us_per_second, &leftover_us); + y = accum("seconds", x, second, CONST_US_PER_SECOND(st), &leftover_us); CLEANUP; } if (minute) { - y = accum("minutes", x, minute, st->us_per_minute, &leftover_us); + y = accum("minutes", x, minute, CONST_US_PER_MINUTE(st), &leftover_us); CLEANUP; } if (hour) { - y = accum("hours", x, hour, st->us_per_hour, &leftover_us); + y = accum("hours", x, hour, CONST_US_PER_HOUR(st), &leftover_us); CLEANUP; } if (day) { - y = accum("days", x, day, st->us_per_day, &leftover_us); + y = accum("days", x, day, CONST_US_PER_DAY(st), &leftover_us); CLEANUP; } if (week) { - y = accum("weeks", x, week, st->us_per_week, &leftover_us); + y = accum("weeks", x, week, CONST_US_PER_WEEK(st), &leftover_us); CLEANUP; } if (leftover_us) { @@ -2649,7 +2835,9 @@ delta_new(PyTypeObject *type, PyObject *args, PyObject *kw) self = microseconds_to_delta_ex(x, type); Py_DECREF(x); + Done: + RELEASE_CURRENT_STATE(st, current_mod); return self; #undef CLEANUP @@ -2762,9 +2950,12 @@ delta_total_seconds(PyObject *self, PyObject *Py_UNUSED(ignored)) if (total_microseconds == NULL) return NULL; - datetime_state *st = STATIC_STATE(); - total_seconds = PyNumber_TrueDivide(total_microseconds, st->us_per_second); + PyObject *current_mod = NULL; + datetime_state *st = GET_CURRENT_STATE(current_mod); + total_seconds = PyNumber_TrueDivide(total_microseconds, CONST_US_PER_SECOND(st)); + + RELEASE_CURRENT_STATE(st, current_mod); Py_DECREF(total_microseconds); return total_seconds; } @@ -2885,6 +3076,25 @@ static PyTypeObject PyDateTime_DeltaType = { 0, /* tp_free */ }; +// XXX Can we make this const? +static PyDateTime_Delta zero_delta = { + PyObject_HEAD_INIT(&PyDateTime_DeltaType) + /* Letting this be set lazily is a benign race. */ + .hashcode = -1, +}; + +static PyDateTime_Delta * +look_up_delta(int days, int seconds, int microseconds, PyTypeObject *type) +{ + if (days == 0 && seconds == 0 && microseconds == 0 + && type == zero_delta.ob_base.ob_type) + { + return &zero_delta; + } + return NULL; +} + + /* * PyDateTime_Date implementation. 
*/ @@ -3415,17 +3625,40 @@ static PyMethodDef iso_calendar_date_methods[] = { {NULL, NULL}, }; -static PyTypeObject PyDateTime_IsoCalendarDateType = { - PyVarObject_HEAD_INIT(NULL, 0) - .tp_name = "datetime.IsoCalendarDate", - .tp_basicsize = sizeof(PyDateTime_IsoCalendarDate), - .tp_repr = (reprfunc) iso_calendar_date_repr, - .tp_flags = Py_TPFLAGS_DEFAULT, - .tp_doc = iso_calendar_date__doc__, - .tp_methods = iso_calendar_date_methods, - .tp_getset = iso_calendar_date_getset, - // .tp_base = &PyTuple_Type, // filled in PyInit__datetime - .tp_new = iso_calendar_date_new, +static int +iso_calendar_date_traverse(PyDateTime_IsoCalendarDate *self, visitproc visit, + void *arg) +{ + Py_VISIT(Py_TYPE(self)); + return PyTuple_Type.tp_traverse((PyObject *)self, visit, arg); +} + +static void +iso_calendar_date_dealloc(PyDateTime_IsoCalendarDate *self) +{ + PyTypeObject *tp = Py_TYPE(self); + PyTuple_Type.tp_dealloc((PyObject *)self); // delegate GC-untrack as well + Py_DECREF(tp); +} + +static PyType_Slot isocal_slots[] = { + {Py_tp_repr, iso_calendar_date_repr}, + {Py_tp_doc, (void *)iso_calendar_date__doc__}, + {Py_tp_methods, iso_calendar_date_methods}, + {Py_tp_getset, iso_calendar_date_getset}, + {Py_tp_new, iso_calendar_date_new}, + {Py_tp_dealloc, iso_calendar_date_dealloc}, + {Py_tp_traverse, iso_calendar_date_traverse}, + {0, NULL}, +}; + +static PyType_Spec isocal_spec = { + .name = "datetime.IsoCalendarDate", + .basicsize = sizeof(PyDateTime_IsoCalendarDate), + .flags = (Py_TPFLAGS_DEFAULT | + Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = isocal_slots, }; /*[clinic input] @@ -3475,8 +3708,12 @@ date_isocalendar(PyDateTime_Date *self, PyObject *Py_UNUSED(ignored)) week = 0; } - PyObject* v = iso_calendar_date_new_impl(&PyDateTime_IsoCalendarDateType, - year, week + 1, day + 1); + PyObject *current_mod = NULL; + datetime_state *st = GET_CURRENT_STATE(current_mod); + + PyObject *v = iso_calendar_date_new_impl(ISOCALENDAR_DATE_TYPE(st), + year, week + 1, day + 1); + RELEASE_CURRENT_STATE(st, current_mod); if (v == NULL) { return NULL; } @@ -3946,7 +4183,7 @@ timezone_new(PyTypeObject *type, PyObject *args, PyObject *kw) PyObject *offset; PyObject *name = NULL; if (PyArg_ParseTupleAndKeywords(args, kw, "O!|U:timezone", timezone_kws, - &PyDateTime_DeltaType, &offset, &name)) + DELTA_TYPE(NO_STATE), &offset, &name)) return new_timezone(offset, name); return NULL; @@ -3999,8 +4236,7 @@ timezone_repr(PyDateTime_TimeZone *self) to use Py_TYPE(self)->tp_name here. */ const char *type_name = Py_TYPE(self)->tp_name; - datetime_state *st = STATIC_STATE(); - if (((PyObject *)self) == st->utc) { + if ((PyObject *)self == CONST_UTC(NO_STATE)) { return PyUnicode_FromFormat("%s.utc", type_name); } @@ -4022,8 +4258,7 @@ timezone_str(PyDateTime_TimeZone *self) if (self->name != NULL) { return Py_NewRef(self->name); } - datetime_state *st = STATIC_STATE(); - if ((PyObject *)self == st->utc || + if ((PyObject *)self == CONST_UTC(NO_STATE) || (GET_TD_DAYS(self->offset) == 0 && GET_TD_SECONDS(self->offset) == 0 && GET_TD_MICROSECONDS(self->offset) == 0)) @@ -4175,6 +4410,23 @@ static PyTypeObject PyDateTime_TimeZoneType = { timezone_new, /* tp_new */ }; +// XXX Can we make this const? 
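+// Like zero_delta above, this is a statically allocated singleton:
+// create_timezone() consults look_up_timezone() first, so the common
+// request for a UTC offset with no name is served from this object
+// instead of allocating a new timezone each time.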
+static PyDateTime_TimeZone utc_timezone = { + PyObject_HEAD_INIT(&PyDateTime_TimeZoneType) + .offset = (PyObject *)&zero_delta, + .name = NULL, +}; + +static PyDateTime_TimeZone * +look_up_timezone(PyObject *offset, PyObject *name) +{ + if (offset == utc_timezone.offset && name == NULL) { + return (PyDateTime_TimeZone *)CONST_UTC(NO_STATE); + } + return NULL; +} + + /* * PyDateTime_Time implementation. */ @@ -4686,7 +4938,7 @@ time_fromisoformat(PyObject *cls, PyObject *tstr) { } PyObject *t; - if ( (PyTypeObject *)cls == &PyDateTime_TimeType ) { + if ( (PyTypeObject *)cls == TIME_TYPE(NO_STATE)) { t = new_time(hour, minute, second, microsecond, tzinfo, 0); } else { t = PyObject_CallFunction(cls, "iiiiO", @@ -5285,8 +5537,8 @@ datetime_combine(PyObject *cls, PyObject *args, PyObject *kw) PyObject *result = NULL; if (PyArg_ParseTupleAndKeywords(args, kw, "O!O!|O:combine", keywords, - &PyDateTime_DateType, &date, - &PyDateTime_TimeType, &time, &tzinfo)) { + DATE_TYPE(NO_STATE), &date, + TIME_TYPE(NO_STATE), &time, &tzinfo)) { if (tzinfo == NULL) { if (HASTZINFO(time)) tzinfo = ((PyDateTime_Time *)time)->tzinfo; @@ -6170,10 +6422,14 @@ local_timezone(PyDateTime_DateTime *utc_time) PyObject *one_second; PyObject *seconds; - datetime_state *st = STATIC_STATE(); - delta = datetime_subtract((PyObject *)utc_time, st->epoch); + PyObject *current_mod = NULL; + datetime_state *st = GET_CURRENT_STATE(current_mod); + + delta = datetime_subtract((PyObject *)utc_time, CONST_EPOCH(st)); + RELEASE_CURRENT_STATE(st, current_mod); if (delta == NULL) return NULL; + one_second = new_delta(0, 1, 0, 0); if (one_second == NULL) { Py_DECREF(delta); @@ -6283,7 +6539,6 @@ datetime_astimezone(PyDateTime_DateTime *self, PyObject *args, PyObject *kw) if (result == NULL) return NULL; - datetime_state *st = STATIC_STATE(); /* Make sure result is aware and UTC. */ if (!HASTZINFO(result)) { temp = (PyObject *)result; @@ -6295,7 +6550,7 @@ datetime_astimezone(PyDateTime_DateTime *self, PyObject *args, PyObject *kw) DATE_GET_MINUTE(result), DATE_GET_SECOND(result), DATE_GET_MICROSECOND(result), - st->utc, + CONST_UTC(NO_STATE), DATE_GET_FOLD(result), Py_TYPE(result)); Py_DECREF(temp); @@ -6304,7 +6559,7 @@ datetime_astimezone(PyDateTime_DateTime *self, PyObject *args, PyObject *kw) } else { /* Result is already aware - just replace tzinfo. */ - Py_SETREF(result->tzinfo, Py_NewRef(st->utc)); + Py_SETREF(result->tzinfo, Py_NewRef(CONST_UTC(NO_STATE))); } /* Attach new tzinfo and let fromutc() do the rest. */ @@ -6408,9 +6663,12 @@ datetime_timestamp(PyDateTime_DateTime *self, PyObject *Py_UNUSED(ignored)) PyObject *result; if (HASTZINFO(self) && self->tzinfo != Py_None) { - datetime_state *st = STATIC_STATE(); + PyObject *current_mod = NULL; + datetime_state *st = GET_CURRENT_STATE(current_mod); + PyObject *delta; - delta = datetime_subtract((PyObject *)self, st->epoch); + delta = datetime_subtract((PyObject *)self, CONST_EPOCH(st)); + RELEASE_CURRENT_STATE(st, current_mod); if (delta == NULL) return NULL; result = delta_total_seconds(delta, NULL); @@ -6699,11 +6957,45 @@ static PyTypeObject PyDateTime_DateTimeType = { }; /* --------------------------------------------------------------------------- - * Module methods and initialization. + * datetime C-API. */ -static PyMethodDef module_methods[] = { - {NULL, NULL} +static PyTypeObject * const capi_types[] = { + &PyDateTime_DateType, + &PyDateTime_DateTimeType, + &PyDateTime_TimeType, + &PyDateTime_DeltaType, + &PyDateTime_TZInfoType, + /* Indirectly, via the utc object. 
*/ + &PyDateTime_TimeZoneType, +}; + +/* The C-API is process-global. This violates interpreter isolation + * due to the objects stored here. Thus each of those objects must + * be managed carefully. */ +// XXX Can we make this const? +static PyDateTime_CAPI capi = { + /* The classes must be readied before used here. + * That will happen the first time the module is loaded. + * They aren't safe to be shared between interpreters, + * but that's okay as long as the module is single-phase init. */ + .DateType = &PyDateTime_DateType, + .DateTimeType = &PyDateTime_DateTimeType, + .TimeType = &PyDateTime_TimeType, + .DeltaType = &PyDateTime_DeltaType, + .TZInfoType = &PyDateTime_TZInfoType, + + .TimeZone_UTC = (PyObject *)&utc_timezone, + + .Date_FromDate = new_date_ex, + .DateTime_FromDateAndTime = new_datetime_ex, + .Time_FromTime = new_time_ex, + .Delta_FromDelta = new_delta_ex, + .TimeZone_FromTimeZone = new_timezone, + .DateTime_FromTimestamp = datetime_fromtimestamp, + .Date_FromTimestamp = datetime_date_fromtimestamp_capi, + .DateTime_FromDateAndTimeAndFold = new_datetime_ex2, + .Time_FromTimeAndFold = new_time_ex2, }; /* Get a new C API by calling this function. @@ -6712,55 +7004,7 @@ static PyMethodDef module_methods[] = { static inline PyDateTime_CAPI * get_datetime_capi(void) { - PyDateTime_CAPI *capi = PyMem_Malloc(sizeof(PyDateTime_CAPI)); - if (capi == NULL) { - PyErr_NoMemory(); - return NULL; - } - capi->DateType = &PyDateTime_DateType; - capi->DateTimeType = &PyDateTime_DateTimeType; - capi->TimeType = &PyDateTime_TimeType; - capi->DeltaType = &PyDateTime_DeltaType; - capi->TZInfoType = &PyDateTime_TZInfoType; - capi->Date_FromDate = new_date_ex; - capi->DateTime_FromDateAndTime = new_datetime_ex; - capi->Time_FromTime = new_time_ex; - capi->Delta_FromDelta = new_delta_ex; - capi->TimeZone_FromTimeZone = new_timezone; - capi->DateTime_FromTimestamp = datetime_fromtimestamp; - capi->Date_FromTimestamp = datetime_date_fromtimestamp_capi; - capi->DateTime_FromDateAndTimeAndFold = new_datetime_ex2; - capi->Time_FromTimeAndFold = new_time_ex2; - // Make sure this function is called after utc has - // been initialized. - datetime_state *st = STATIC_STATE(); - assert(st->utc != NULL); - capi->TimeZone_UTC = st->utc; // borrowed ref - return capi; -} - -static void -datetime_destructor(PyObject *op) -{ - void *ptr = PyCapsule_GetPointer(op, PyDateTime_CAPSULE_NAME); - PyMem_Free(ptr); -} - -static int -datetime_clear(PyObject *module) -{ - datetime_state *st = STATIC_STATE(); - - Py_CLEAR(st->us_per_ms); - Py_CLEAR(st->us_per_second); - Py_CLEAR(st->us_per_minute); - Py_CLEAR(st->us_per_hour); - Py_CLEAR(st->us_per_day); - Py_CLEAR(st->us_per_week); - Py_CLEAR(st->seconds_per_day); - Py_CLEAR(st->utc); - Py_CLEAR(st->epoch); - return 0; + return &capi; } static PyObject * @@ -6775,9 +7019,45 @@ create_timezone_from_delta(int days, int sec, int ms, int normalize) return tz; } + +/* --------------------------------------------------------------------------- + * Module state lifecycle. + */ + static int -init_state(datetime_state *st) -{ +init_state(datetime_state *st, PyObject *module, PyObject *old_module) +{ + /* Each module gets its own heap types. 
*/ +#define ADD_TYPE(FIELD, SPEC, BASE) \ + do { \ + PyObject *cls = PyType_FromModuleAndSpec( \ + module, SPEC, (PyObject *)BASE); \ + if (cls == NULL) { \ + return -1; \ + } \ + st->FIELD = (PyTypeObject *)cls; \ + } while (0) + + ADD_TYPE(isocalendar_date_type, &isocal_spec, &PyTuple_Type); +#undef ADD_TYPE + + if (old_module != NULL) { + assert(old_module != module); + datetime_state *st_old = get_module_state(old_module); + *st = (datetime_state){ + .isocalendar_date_type = st->isocalendar_date_type, + .us_per_ms = Py_NewRef(st_old->us_per_ms), + .us_per_second = Py_NewRef(st_old->us_per_second), + .us_per_minute = Py_NewRef(st_old->us_per_minute), + .us_per_hour = Py_NewRef(st_old->us_per_hour), + .us_per_day = Py_NewRef(st_old->us_per_day), + .us_per_week = Py_NewRef(st_old->us_per_week), + .seconds_per_day = Py_NewRef(st_old->seconds_per_day), + .epoch = Py_NewRef(st_old->epoch), + }; + return 0; + } + st->us_per_ms = PyLong_FromLong(1000); if (st->us_per_ms == NULL) { return -1; @@ -6811,51 +7091,155 @@ init_state(datetime_state *st) return -1; } - /* Init UTC timezone */ - st->utc = create_timezone_from_delta(0, 0, 0, 0); - if (st->utc == NULL) { - return -1; - } - /* Init Unix epoch */ - st->epoch = new_datetime(1970, 1, 1, 0, 0, 0, 0, st->utc, 0); + st->epoch = new_datetime( + 1970, 1, 1, 0, 0, 0, 0, (PyObject *)&utc_timezone, 0); if (st->epoch == NULL) { return -1; } + return 0; } static int -_datetime_exec(PyObject *module) +traverse_state(datetime_state *st, visitproc visit, void *arg) +{ + /* heap types */ + Py_VISIT(st->isocalendar_date_type); + + return 0; +} + +static int +clear_state(datetime_state *st) { + Py_CLEAR(st->isocalendar_date_type); + Py_CLEAR(st->us_per_ms); + Py_CLEAR(st->us_per_second); + Py_CLEAR(st->us_per_minute); + Py_CLEAR(st->us_per_hour); + Py_CLEAR(st->us_per_day); + Py_CLEAR(st->us_per_week); + Py_CLEAR(st->seconds_per_day); + Py_CLEAR(st->epoch); + return 0; +} + + +/* --------------------------------------------------------------------------- + * Global module state. + */ + +// If we make _PyStaticType_*ForExtension() public +// then all this should be managed by the runtime. + +static struct { + PyMutex mutex; + int64_t interp_count; +} _globals = {0}; + +static void +callback_for_interp_exit(void *Py_UNUSED(data)) +{ + PyInterpreterState *interp = PyInterpreterState_Get(); + + assert(_globals.interp_count > 0); + PyMutex_Lock(&_globals.mutex); + _globals.interp_count -= 1; + int final = !_globals.interp_count; + PyMutex_Unlock(&_globals.mutex); + + /* They must be done in reverse order so subclasses are finalized + * before base classes. */ + for (size_t i = Py_ARRAY_LENGTH(capi_types); i > 0; i--) { + PyTypeObject *type = capi_types[i-1]; + _PyStaticType_FiniForExtension(interp, type, final); + } +} + +static int +init_static_types(PyInterpreterState *interp, int reloading) +{ + if (reloading) { + return 0; + } + // `&...` is not a constant expression according to a strict reading // of C standards. Fill tp_base at run-time rather than statically. 
// See https://bugs.python.org/issue40777 - PyDateTime_IsoCalendarDateType.tp_base = &PyTuple_Type; PyDateTime_TimeZoneType.tp_base = &PyDateTime_TZInfoType; PyDateTime_DateTimeType.tp_base = &PyDateTime_DateType; - PyTypeObject *types[] = { - &PyDateTime_DateType, - &PyDateTime_DateTimeType, - &PyDateTime_TimeType, - &PyDateTime_DeltaType, - &PyDateTime_TZInfoType, - &PyDateTime_TimeZoneType, - }; + /* Bases classes must be initialized before subclasses, + * so capi_types must have the types in the appropriate order. */ + for (size_t i = 0; i < Py_ARRAY_LENGTH(capi_types); i++) { + PyTypeObject *type = capi_types[i]; + if (_PyStaticType_InitForExtension(interp, type) < 0) { + return -1; + } + } + + PyMutex_Lock(&_globals.mutex); + assert(_globals.interp_count >= 0); + _globals.interp_count += 1; + PyMutex_Unlock(&_globals.mutex); + + /* It could make sense to add a separate callback + * for each of the types. However, for now we can take the simpler + * approach of a single callback. */ + if (PyUnstable_AtExit(interp, callback_for_interp_exit, NULL) < 0) { + callback_for_interp_exit(NULL); + return -1; + } + + return 0; +} + + +/* --------------------------------------------------------------------------- + * Module methods and initialization. + */ + +static PyMethodDef module_methods[] = { + {NULL, NULL} +}; - for (size_t i = 0; i < Py_ARRAY_LENGTH(types); i++) { - if (PyModule_AddType(module, types[i]) < 0) { + +static int +_datetime_exec(PyObject *module) +{ + int rc = -1; + datetime_state *st = get_module_state(module); + int reloading = 0; + + PyInterpreterState *interp = PyInterpreterState_Get(); + PyObject *old_module = get_current_module(interp, &reloading); + if (PyErr_Occurred()) { + assert(old_module == NULL); + goto error; + } + /* We actually set the "current" module right before a successful return. 
*/ + + if (init_static_types(interp, reloading) < 0) { + goto error; + } + + for (size_t i = 0; i < Py_ARRAY_LENGTH(capi_types); i++) { + PyTypeObject *type = capi_types[i]; + const char *name = _PyType_Name(type); + assert(name != NULL); + if (PyModule_AddObjectRef(module, name, (PyObject *)type) < 0) { goto error; } } - if (PyType_Ready(&PyDateTime_IsoCalendarDateType) < 0) { + if (init_state(st, module, old_module) < 0) { goto error; } #define DATETIME_ADD_MACRO(dict, c, value_expr) \ do { \ + assert(!PyErr_Occurred()); \ PyObject *value = (value_expr); \ if (value == NULL) { \ goto error; \ @@ -6868,40 +7252,35 @@ _datetime_exec(PyObject *module) } while(0) /* timedelta values */ - PyObject *d = PyDateTime_DeltaType.tp_dict; + PyObject *d = _PyType_GetDict(&PyDateTime_DeltaType); DATETIME_ADD_MACRO(d, "resolution", new_delta(0, 0, 1, 0)); DATETIME_ADD_MACRO(d, "min", new_delta(-MAX_DELTA_DAYS, 0, 0, 0)); DATETIME_ADD_MACRO(d, "max", new_delta(MAX_DELTA_DAYS, 24*3600-1, 1000000-1, 0)); /* date values */ - d = PyDateTime_DateType.tp_dict; + d = _PyType_GetDict(&PyDateTime_DateType); DATETIME_ADD_MACRO(d, "min", new_date(1, 1, 1)); DATETIME_ADD_MACRO(d, "max", new_date(MAXYEAR, 12, 31)); DATETIME_ADD_MACRO(d, "resolution", new_delta(1, 0, 0, 0)); /* time values */ - d = PyDateTime_TimeType.tp_dict; + d = _PyType_GetDict(&PyDateTime_TimeType); DATETIME_ADD_MACRO(d, "min", new_time(0, 0, 0, 0, Py_None, 0)); DATETIME_ADD_MACRO(d, "max", new_time(23, 59, 59, 999999, Py_None, 0)); DATETIME_ADD_MACRO(d, "resolution", new_delta(0, 0, 1, 0)); /* datetime values */ - d = PyDateTime_DateTimeType.tp_dict; + d = _PyType_GetDict(&PyDateTime_DateTimeType); DATETIME_ADD_MACRO(d, "min", new_datetime(1, 1, 1, 0, 0, 0, 0, Py_None, 0)); DATETIME_ADD_MACRO(d, "max", new_datetime(MAXYEAR, 12, 31, 23, 59, 59, 999999, Py_None, 0)); DATETIME_ADD_MACRO(d, "resolution", new_delta(0, 0, 1, 0)); - datetime_state *st = STATIC_STATE(); - if (init_state(st) < 0) { - goto error; - } - /* timezone values */ - d = PyDateTime_TimeZoneType.tp_dict; - if (PyDict_SetItemString(d, "utc", st->utc) < 0) { + d = _PyType_GetDict(&PyDateTime_TimeZoneType); + if (PyDict_SetItemString(d, "utc", (PyObject *)&utc_timezone) < 0) { goto error; } @@ -6910,12 +7289,13 @@ _datetime_exec(PyObject *module) * values. 
This may change in the future.*/ /* -23:59 */ - PyObject *min = create_timezone_from_delta(-1, 60, 0, 1); - DATETIME_ADD_MACRO(d, "min", min); + DATETIME_ADD_MACRO(d, "min", create_timezone_from_delta(-1, 60, 0, 1)); /* +23:59 */ - PyObject *max = create_timezone_from_delta(0, (23 * 60 + 59) * 60, 0, 0); - DATETIME_ADD_MACRO(d, "max", max); + DATETIME_ADD_MACRO( + d, "max", create_timezone_from_delta(0, (23 * 60 + 59) * 60, 0, 0)); + +#undef DATETIME_ADD_MACRO /* Add module level attributes */ if (PyModule_AddIntMacro(module, MINYEAR) < 0) { @@ -6924,7 +7304,7 @@ _datetime_exec(PyObject *module) if (PyModule_AddIntMacro(module, MAXYEAR) < 0) { goto error; } - if (PyModule_AddObjectRef(module, "UTC", st->utc) < 0) { + if (PyModule_AddObjectRef(module, "UTC", (PyObject *)&utc_timezone) < 0) { goto error; } @@ -6933,14 +7313,9 @@ _datetime_exec(PyObject *module) if (capi == NULL) { goto error; } - PyObject *capsule = PyCapsule_New(capi, PyDateTime_CAPSULE_NAME, - datetime_destructor); - if (capsule == NULL) { - PyMem_Free(capi); - goto error; - } + PyObject *capsule = PyCapsule_New(capi, PyDateTime_CAPSULE_NAME, NULL); + // (capsule == NULL) is handled by PyModule_Add if (PyModule_Add(module, "datetime_CAPI", capsule) < 0) { - PyMem_Free(capi); goto error; } @@ -6962,38 +7337,73 @@ _datetime_exec(PyObject *module) static_assert(DI100Y == 25 * DI4Y - 1, "DI100Y"); assert(DI100Y == days_before_year(100+1)); - return 0; + if (set_current_module(interp, module) < 0) { + goto error; + } + + rc = 0; + goto finally; error: - datetime_clear(module); - return -1; + clear_state(st); + +finally: + Py_XDECREF(old_module); + return rc; } -#undef DATETIME_ADD_MACRO -static struct PyModuleDef datetimemodule = { +static PyModuleDef_Slot module_slots[] = { + {Py_mod_exec, _datetime_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, + {Py_mod_gil, Py_MOD_GIL_NOT_USED}, + {0, NULL}, +}; + +static int +module_traverse(PyObject *mod, visitproc visit, void *arg) +{ + datetime_state *st = get_module_state(mod); + traverse_state(st, visit, arg); + return 0; +} + +static int +module_clear(PyObject *mod) +{ + datetime_state *st = get_module_state(mod); + clear_state(st); + + PyInterpreterState *interp = PyInterpreterState_Get(); + clear_current_module(interp, mod); + + // We take care of the static types via an interpreter atexit hook. + // See callback_for_interp_exit() above. 
+ + return 0; +} + +static void +module_free(void *mod) +{ + (void)module_clear((PyObject *)mod); +} + +static PyModuleDef datetimemodule = { .m_base = PyModuleDef_HEAD_INIT, .m_name = "_datetime", .m_doc = "Fast implementation of the datetime type.", - .m_size = -1, + .m_size = sizeof(datetime_state), .m_methods = module_methods, + .m_slots = module_slots, + .m_traverse = module_traverse, + .m_clear = module_clear, + .m_free = module_free, }; PyMODINIT_FUNC PyInit__datetime(void) { - PyObject *mod = PyModule_Create(&datetimemodule); - if (mod == NULL) - return NULL; -#ifdef Py_GIL_DISABLED - PyUnstable_Module_SetGIL(mod, Py_MOD_GIL_NOT_USED); -#endif - - if (_datetime_exec(mod) < 0) { - Py_DECREF(mod); - return NULL; - } - - return mod; + return PyModuleDef_Init(&datetimemodule); } /* --------------------------------------------------------------------------- diff --git a/Modules/_io/clinic/bufferedio.c.h b/Modules/_io/clinic/bufferedio.c.h index 64eddcd314a803..708bef638887e2 100644 --- a/Modules/_io/clinic/bufferedio.c.h +++ b/Modules/_io/clinic/bufferedio.c.h @@ -4,7 +4,7 @@ preserve #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) # include "pycore_gc.h" // PyGC_Head -# include "pycore_runtime.h" // _Py_ID() +# include "pycore_runtime.h" // _Py_SINGLETON() #endif #include "pycore_abstract.h" // _PyNumber_Index() #include "pycore_critical_section.h"// Py_BEGIN_CRITICAL_SECTION() @@ -1245,4 +1245,4 @@ _io_BufferedRandom___init__(PyObject *self, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=4249187a725a3b3e input=a9049054013a1b77]*/ +/*[clinic end generated code: output=8eead000083dc5fa input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/iobase.c.h b/Modules/_io/clinic/iobase.c.h index bae80a265fab07..a35cac7dc0b8d7 100644 --- a/Modules/_io/clinic/iobase.c.h +++ b/Modules/_io/clinic/iobase.c.h @@ -2,6 +2,9 @@ preserve [clinic start generated code]*/ +#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) +# include "pycore_runtime.h" // _Py_SINGLETON() +#endif #include "pycore_abstract.h" // _Py_convert_optional_to_ssize_t() #include "pycore_modsupport.h" // _PyArg_UnpackKeywords() @@ -438,4 +441,4 @@ _io__RawIOBase_readall(PyObject *self, PyObject *Py_UNUSED(ignored)) { return _io__RawIOBase_readall_impl(self); } -/*[clinic end generated code: output=e7326fbefc52bfba input=a9049054013a1b77]*/ +/*[clinic end generated code: output=dab5e9323d191e32 input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/textio.c.h b/Modules/_io/clinic/textio.c.h index f04ee729abc9ed..669e2aa637ebbf 100644 --- a/Modules/_io/clinic/textio.c.h +++ b/Modules/_io/clinic/textio.c.h @@ -4,7 +4,7 @@ preserve #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) # include "pycore_gc.h" // PyGC_Head -# include "pycore_runtime.h" // _Py_ID() +# include "pycore_runtime.h" // _Py_SINGLETON() #endif #include "pycore_abstract.h" // _Py_convert_optional_to_ssize_t() #include "pycore_critical_section.h"// Py_BEGIN_CRITICAL_SECTION() @@ -1292,4 +1292,4 @@ _io_TextIOWrapper__CHUNK_SIZE_set(textio *self, PyObject *value, void *Py_UNUSED return return_value; } -/*[clinic end generated code: output=93a5a91a22100a28 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=04cb7c67791a9ec1 input=a9049054013a1b77]*/ diff --git a/Modules/_multiprocessing/clinic/semaphore.c.h b/Modules/_multiprocessing/clinic/semaphore.c.h index 64e666b5af6f5b..512e5a016192fb 100644 --- a/Modules/_multiprocessing/clinic/semaphore.c.h +++ 
b/Modules/_multiprocessing/clinic/semaphore.c.h @@ -473,7 +473,13 @@ _multiprocessing_SemLock___enter___impl(SemLockObject *self); static PyObject * _multiprocessing_SemLock___enter__(SemLockObject *self, PyObject *Py_UNUSED(ignored)) { - return _multiprocessing_SemLock___enter___impl(self); + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = _multiprocessing_SemLock___enter___impl(self); + Py_END_CRITICAL_SECTION(); + + return return_value; } #endif /* defined(HAVE_MP_SEMAPHORE) */ @@ -518,7 +524,9 @@ _multiprocessing_SemLock___exit__(SemLockObject *self, PyObject *const *args, Py } exc_tb = args[2]; skip_optional: + Py_BEGIN_CRITICAL_SECTION(self); return_value = _multiprocessing_SemLock___exit___impl(self, exc_type, exc_value, exc_tb); + Py_END_CRITICAL_SECTION(); exit: return return_value; @@ -565,4 +573,4 @@ _multiprocessing_SemLock___exit__(SemLockObject *self, PyObject *const *args, Py #ifndef _MULTIPROCESSING_SEMLOCK___EXIT___METHODDEF #define _MULTIPROCESSING_SEMLOCK___EXIT___METHODDEF #endif /* !defined(_MULTIPROCESSING_SEMLOCK___EXIT___METHODDEF) */ -/*[clinic end generated code: output=713b597256233716 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=dea36482d23a355f input=a9049054013a1b77]*/ diff --git a/Modules/_multiprocessing/semaphore.c b/Modules/_multiprocessing/semaphore.c index 5bb055f501e35b..4de4ee6c78fbd1 100644 --- a/Modules/_multiprocessing/semaphore.c +++ b/Modules/_multiprocessing/semaphore.c @@ -682,6 +682,7 @@ _multiprocessing_SemLock__after_fork_impl(SemLockObject *self) } /*[clinic input] +@critical_section _multiprocessing.SemLock.__enter__ Enter the semaphore/lock. @@ -689,12 +690,13 @@ Enter the semaphore/lock. static PyObject * _multiprocessing_SemLock___enter___impl(SemLockObject *self) -/*[clinic end generated code: output=beeb2f07c858511f input=c5e27d594284690b]*/ +/*[clinic end generated code: output=beeb2f07c858511f input=d35c9860992ee790]*/ { return _multiprocessing_SemLock_acquire_impl(self, 1, Py_None); } /*[clinic input] +@critical_section _multiprocessing.SemLock.__exit__ exc_type: object = None @@ -709,7 +711,7 @@ static PyObject * _multiprocessing_SemLock___exit___impl(SemLockObject *self, PyObject *exc_type, PyObject *exc_value, PyObject *exc_tb) -/*[clinic end generated code: output=3b37c1a9f8b91a03 input=7d644b64a89903f8]*/ +/*[clinic end generated code: output=3b37c1a9f8b91a03 input=1610c8cc3e0e337e]*/ { return _multiprocessing_SemLock_release_impl(self); } diff --git a/Modules/_testcapi/monitoring.c b/Modules/_testcapi/monitoring.c index aa90cfc06c1536..6fd4a405688f48 100644 --- a/Modules/_testcapi/monitoring.c +++ b/Modules/_testcapi/monitoring.c @@ -416,16 +416,17 @@ fire_event_stop_iteration(PyObject *self, PyObject *args) { PyObject *codelike; int offset; - PyObject *exception; - if (!PyArg_ParseTuple(args, "OiO", &codelike, &offset, &exception)) { + PyObject *value; + if (!PyArg_ParseTuple(args, "OiO", &codelike, &offset, &value)) { return NULL; } - NULLABLE(exception); + NULLABLE(value); + PyObject *exception = NULL; PyMonitoringState *state = setup_fire(codelike, offset, exception); if (state == NULL) { return NULL; } - int res = PyMonitoring_FireStopIterationEvent(state, codelike, offset); + int res = PyMonitoring_FireStopIterationEvent(state, codelike, offset, value); RETURN_INT(teardown_fire(res, state, exception)); } diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index ff31724c0e9ff9..b58c17260626c2 100644 --- a/Modules/_testcapimodule.c +++ 
b/Modules/_testcapimodule.c @@ -764,6 +764,14 @@ test_thread_state(PyObject *self, PyObject *args) Py_RETURN_NONE; } +static PyObject * +gilstate_ensure_release(PyObject *module, PyObject *Py_UNUSED(ignored)) +{ + PyGILState_STATE state = PyGILState_Ensure(); + PyGILState_Release(state); + Py_RETURN_NONE; +} + #ifndef MS_WINDOWS static PyThread_type_lock wait_done = NULL; @@ -3303,6 +3311,15 @@ test_reftracer(PyObject *ob, PyObject *Py_UNUSED(ignored)) return NULL; } +static PyObject * +function_set_warning(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(args)) +{ + if (PyErr_WarnEx(PyExc_RuntimeWarning, "Testing PyErr_WarnEx", 2)) { + return NULL; + } + Py_RETURN_NONE; +} + static PyMethodDef TestMethods[] = { {"set_errno", set_errno, METH_VARARGS}, {"test_config", test_config, METH_NOARGS}, @@ -3342,6 +3359,7 @@ static PyMethodDef TestMethods[] = { {"test_get_type_dict", test_get_type_dict, METH_NOARGS}, {"test_reftracer", test_reftracer, METH_NOARGS}, {"_test_thread_state", test_thread_state, METH_VARARGS}, + {"gilstate_ensure_release", gilstate_ensure_release, METH_NOARGS}, #ifndef MS_WINDOWS {"_spawn_pthread_waiter", spawn_pthread_waiter, METH_NOARGS}, {"_end_spawned_pthread", end_spawned_pthread, METH_NOARGS}, @@ -3444,6 +3462,7 @@ static PyMethodDef TestMethods[] = { {"function_set_closure", function_set_closure, METH_VARARGS, NULL}, {"check_pyimport_addmodule", check_pyimport_addmodule, METH_VARARGS}, {"test_weakref_capi", test_weakref_capi, METH_NOARGS}, + {"function_set_warning", function_set_warning, METH_NOARGS}, {NULL, NULL} /* sentinel */ }; diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c index e209c7e05264f2..8c65b80113c23e 100644 --- a/Modules/_testinternalcapi.c +++ b/Modules/_testinternalcapi.c @@ -1965,24 +1965,18 @@ get_py_thread_id(PyObject *self, PyObject *Py_UNUSED(ignored)) #endif static PyObject * -set_immortalize_deferred(PyObject *self, PyObject *value) +suppress_immortalization(PyObject *self, PyObject *value) { #ifdef Py_GIL_DISABLED - PyInterpreterState *interp = PyInterpreterState_Get(); - int old_enabled = interp->gc.immortalize.enabled; - int old_enabled_on_thread = interp->gc.immortalize.enable_on_thread_created; - int enabled_on_thread = 0; - if (!PyArg_ParseTuple(value, "i|i", - &interp->gc.immortalize.enabled, - &enabled_on_thread)) - { + int suppress = PyObject_IsTrue(value); + if (suppress < 0) { return NULL; } - interp->gc.immortalize.enable_on_thread_created = enabled_on_thread; - return Py_BuildValue("ii", old_enabled, old_enabled_on_thread); -#else - return Py_BuildValue("OO", Py_False, Py_False); + PyInterpreterState *interp = PyInterpreterState_Get(); + // Subtract two to suppress immortalization (so that 1 -> -1) + _Py_atomic_add_int(&interp->gc.immortalize, suppress ? 
-2 : 2); #endif + Py_RETURN_NONE; } static PyObject * @@ -1990,7 +1984,7 @@ get_immortalize_deferred(PyObject *self, PyObject *Py_UNUSED(ignored)) { #ifdef Py_GIL_DISABLED PyInterpreterState *interp = PyInterpreterState_Get(); - return PyBool_FromLong(interp->gc.immortalize.enable_on_thread_created); + return PyBool_FromLong(_Py_atomic_load_int(&interp->gc.immortalize) >= 0); #else Py_RETURN_FALSE; #endif @@ -2006,6 +2000,25 @@ has_inline_values(PyObject *self, PyObject *obj) Py_RETURN_FALSE; } + +/*[clinic input] +gh_119213_getargs + + spam: object = None + +Test _PyArg_Parser.kwtuple +[clinic start generated code]*/ + +static PyObject * +gh_119213_getargs_impl(PyObject *module, PyObject *spam) +/*[clinic end generated code: output=d8d9c95d5b446802 input=65ef47511da80fc2]*/ +{ + // It must never have been called in the main interprer + assert(!_Py_IsMainInterpreter(PyInterpreterState_Get())); + return Py_NewRef(spam); +} + + static PyMethodDef module_functions[] = { {"get_configs", get_configs, METH_NOARGS}, {"get_recursion_depth", get_recursion_depth, METH_NOARGS}, @@ -2091,11 +2104,12 @@ static PyMethodDef module_functions[] = { #ifdef Py_GIL_DISABLED {"py_thread_id", get_py_thread_id, METH_NOARGS}, #endif - {"set_immortalize_deferred", set_immortalize_deferred, METH_VARARGS}, + {"suppress_immortalization", suppress_immortalization, METH_O}, {"get_immortalize_deferred", get_immortalize_deferred, METH_NOARGS}, #ifdef _Py_TIER2 {"uop_symbols_test", _Py_uop_symbols_test, METH_NOARGS}, #endif + GH_119213_GETARGS_METHODDEF {NULL, NULL} /* sentinel */ }; diff --git a/Modules/_testinternalcapi/test_critical_sections.c b/Modules/_testinternalcapi/test_critical_sections.c index cdf8a70fc79ff3..1c0e049efafcb7 100644 --- a/Modules/_testinternalcapi/test_critical_sections.c +++ b/Modules/_testinternalcapi/test_critical_sections.c @@ -49,15 +49,6 @@ test_critical_sections(PyObject *self, PyObject *Py_UNUSED(args)) Py_END_CRITICAL_SECTION2(); assert_nogil(!PyMutex_IsLocked(&d2->ob_mutex)); - // Optional variant behaves the same if the object is non-NULL - Py_XBEGIN_CRITICAL_SECTION(d1); - assert_nogil(PyMutex_IsLocked(&d1->ob_mutex)); - Py_XEND_CRITICAL_SECTION(); - - // No-op - Py_XBEGIN_CRITICAL_SECTION(NULL); - Py_XEND_CRITICAL_SECTION(); - Py_DECREF(d2); Py_DECREF(d1); Py_RETURN_NONE; diff --git a/Modules/_testinternalcapi/test_lock.c b/Modules/_testinternalcapi/test_lock.c index 4900459c689279..1544fe1363c7c5 100644 --- a/Modules/_testinternalcapi/test_lock.c +++ b/Modules/_testinternalcapi/test_lock.c @@ -2,6 +2,7 @@ #include "parts.h" #include "pycore_lock.h" +#include "pycore_pythread.h" // PyThread_get_thread_ident_ex() #include "clinic/test_lock.c.h" @@ -476,6 +477,29 @@ test_lock_rwlock(PyObject *self, PyObject *obj) Py_RETURN_NONE; } +static PyObject * +test_lock_recursive(PyObject *self, PyObject *obj) +{ + _PyRecursiveMutex m = (_PyRecursiveMutex){0}; + assert(!_PyRecursiveMutex_IsLockedByCurrentThread(&m)); + + _PyRecursiveMutex_Lock(&m); + assert(m.thread == PyThread_get_thread_ident_ex()); + assert(PyMutex_IsLocked(&m.mutex)); + assert(m.level == 0); + + _PyRecursiveMutex_Lock(&m); + assert(m.level == 1); + _PyRecursiveMutex_Unlock(&m); + + _PyRecursiveMutex_Unlock(&m); + assert(m.thread == 0); + assert(!PyMutex_IsLocked(&m.mutex)); + assert(m.level == 0); + + Py_RETURN_NONE; +} + static PyMethodDef test_methods[] = { {"test_lock_basic", test_lock_basic, METH_NOARGS}, {"test_lock_two_threads", test_lock_two_threads, METH_NOARGS}, @@ -485,6 +509,7 @@ static PyMethodDef test_methods[] 
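As an aside on the `suppress_immortalization()` hunk earlier in this file's diff: the old enable/disable pair becomes a single atomic counter. Immortalization of deferred objects stays enabled while `gc.immortalize >= 0` (see `get_immortalize_deferred()`); each suppressor subtracts 2 and each release adds 2, so suppression nests and the starting value (0 or 1) is restored once every caller has released. A standalone toy model of that counter protocol (illustrative only; `immortalize`, `suppress()` and `release()` are stand-ins, not CPython APIs):

```
#include <assert.h>

static int immortalize = 1;     /* stand-in for interp->gc.immortalize */

static void suppress(void) { immortalize -= 2; }  /* like suppress_immortalization(True)  */
static void release(void)  { immortalize += 2; }  /* like suppress_immortalization(False) */

int main(void)
{
    assert(immortalize >= 0);   /* enabled */
    suppress();                 /*  1 -> -1: immortalization suppressed */
    suppress();                 /* -1 -> -3: nesting keeps it suppressed */
    release();                  /* -3 -> -1: still suppressed */
    release();                  /* -1 ->  1: original state restored */
    assert(immortalize >= 0);
    return 0;
}
```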
= { {"test_lock_benchmark", test_lock_benchmark, METH_NOARGS}, {"test_lock_once", test_lock_once, METH_NOARGS}, {"test_lock_rwlock", test_lock_rwlock, METH_NOARGS}, + {"test_lock_recursive", test_lock_recursive, METH_NOARGS}, {NULL, NULL} /* sentinel */ }; diff --git a/Modules/_testsinglephase.c b/Modules/_testsinglephase.c index 448be502466e79..066e0dbfb63fbf 100644 --- a/Modules/_testsinglephase.c +++ b/Modules/_testsinglephase.c @@ -1,7 +1,7 @@ /* Testing module for single-phase initialization of extension modules -This file contains 5 distinct modules, meaning each as its own name +This file contains 8 distinct modules, meaning each as its own name and its own init function (PyInit_...). The default import system will only find the one matching the filename: _testsinglephase. To load the others you must do so manually. For example: @@ -14,7 +14,7 @@ spec = importlib.util.spec_from_file_location(name, filename, loader=loader) mod = importlib._bootstrap._load(spec) ``` -Here are the 5 modules: +Here are the 8 modules: * _testsinglephase * def: _testsinglephase_basic, @@ -136,6 +136,32 @@ Here are the 5 modules: 5. increment .initialized_count * functions: see common functions below * import system: same as _testsinglephase_basic_copy +* _testsinglephase_check_cache_first + * def: _testsinglepahse_check_cache_first + * m_name: "_testsinglephase_check_cache_first" + * m_size: -1 + * state: none + * init function: + * tries PyState_FindModule() first + * otherwise creates empty module + * functions: none + * import system: same as _testsinglephase +* _testsinglephase_with_reinit_check_cache_first + * def: _testsinglepahse_with_reinit_check_cache_first + * m_name: "_testsinglephase_with_reinit_check_cache_first" + * m_size: 0 + * state: none + * init function: same as _testsinglephase_check_cache_first + * functions: none + * import system: same as _testsinglephase_with_reinit +* _testsinglephase_with_state_check_cache_first + * def: _testsinglepahse_with_state_check_cache_first + * m_name: "_testsinglephase_with_state_check_cache_first" + * m_size: 42 + * state: none + * init function: same as _testsinglephase_check_cache_first + * functions: none + * import system: same as _testsinglephase_with_state Module state: @@ -650,3 +676,67 @@ PyInit__testsinglephase_with_state(void) finally: return module; } + + +/****************************************************/ +/* the _testsinglephase_*_check_cache_first modules */ +/****************************************************/ + +/* Each of these modules should only be freshly loaded. That means + clearing the caches and each module def's m_base after each load. 
*/ + +static struct PyModuleDef _testsinglephase_check_cache_first = { + PyModuleDef_HEAD_INIT, + .m_name = "_testsinglephase_check_cache_first", + .m_doc = PyDoc_STR("Test module _testsinglephase_check_cache_first"), + .m_size = -1, // no module state +}; + +PyMODINIT_FUNC +PyInit__testsinglephase_check_cache_first(void) +{ + assert(_testsinglephase_check_cache_first.m_base.m_index == 0); + PyObject *mod = PyState_FindModule(&_testsinglephase_check_cache_first); + if (mod != NULL) { + return Py_NewRef(mod); + } + return PyModule_Create(&_testsinglephase_check_cache_first); +} + + +static struct PyModuleDef _testsinglephase_with_reinit_check_cache_first = { + PyModuleDef_HEAD_INIT, + .m_name = "_testsinglephase_with_reinit_check_cache_first", + .m_doc = PyDoc_STR("Test module _testsinglephase_with_reinit_check_cache_first"), + .m_size = 0, // no module state +}; + +PyMODINIT_FUNC +PyInit__testsinglephase_with_reinit_check_cache_first(void) +{ + assert(_testsinglephase_with_reinit_check_cache_first.m_base.m_index == 0); + PyObject *mod = PyState_FindModule(&_testsinglephase_with_reinit_check_cache_first); + if (mod != NULL) { + return Py_NewRef(mod); + } + return PyModule_Create(&_testsinglephase_with_reinit_check_cache_first); +} + + +static struct PyModuleDef _testsinglephase_with_state_check_cache_first = { + PyModuleDef_HEAD_INIT, + .m_name = "_testsinglephase_with_state_check_cache_first", + .m_doc = PyDoc_STR("Test module _testsinglephase_with_state_check_cache_first"), + .m_size = 42, // not used +}; + +PyMODINIT_FUNC +PyInit__testsinglephase_with_state_check_cache_first(void) +{ + assert(_testsinglephase_with_state_check_cache_first.m_base.m_index == 0); + PyObject *mod = PyState_FindModule(&_testsinglephase_with_state_check_cache_first); + if (mod != NULL) { + return Py_NewRef(mod); + } + return PyModule_Create(&_testsinglephase_with_state_check_cache_first); +} diff --git a/Modules/_tkinter.c b/Modules/_tkinter.c index c7e271faa4cf34..0cff36dd307c39 100644 --- a/Modules/_tkinter.c +++ b/Modules/_tkinter.c @@ -318,6 +318,7 @@ typedef struct { const Tcl_ObjType *BignumType; const Tcl_ObjType *ListType; const Tcl_ObjType *StringType; + const Tcl_ObjType *UTF32StringType; } TkappObject; #define Tkapp_Interp(v) (((TkappObject *) (v))->interp) @@ -588,14 +589,40 @@ Tkapp_New(const char *screenName, const char *className, } v->OldBooleanType = Tcl_GetObjType("boolean"); - v->BooleanType = Tcl_GetObjType("booleanString"); - v->ByteArrayType = Tcl_GetObjType("bytearray"); + { + Tcl_Obj *value; + int boolValue; + + /* Tcl 8.5 "booleanString" type is not registered + and is renamed to "boolean" in Tcl 9.0. + Based on approach suggested at + https://core.tcl-lang.org/tcl/info/3bb3bcf2da5b */ + value = Tcl_NewStringObj("true", -1); + Tcl_GetBooleanFromObj(NULL, value, &boolValue); + v->BooleanType = value->typePtr; + Tcl_DecrRefCount(value); + + // "bytearray" type is not registered in Tcl 9.0 + value = Tcl_NewByteArrayObj(NULL, 0); + v->ByteArrayType = value->typePtr; + Tcl_DecrRefCount(value); + } v->DoubleType = Tcl_GetObjType("double"); + /* TIP 484 suggests retrieving the "int" type without Tcl_GetObjType("int") + since it is no longer registered in Tcl 9.0. But even though Tcl 8.7 + only uses the "wideInt" type on platforms with 32-bit long, it still has + a registered "int" type, which FromObj() should recognize just in case. 
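The comment here describes the workaround the `_tkinter` hunk uses for Tcl types that are no longer registered by name (Tcl 9.0 / TIP 484): build a throwaway value of the wanted type and read its `typePtr` instead of calling `Tcl_GetObjType()`. The generic shape of that trick, as a hedged sketch (not part of the patch; the helper name is made up):

```
#include <tcl.h>

/* Return the Tcl_ObjType of a freshly created value, then drop the value. */
static const Tcl_ObjType *
TypeOfNewObj(Tcl_Obj *value)
{
    Tcl_IncrRefCount(value);
    const Tcl_ObjType *tp = value->typePtr;
    Tcl_DecrRefCount(value);        /* frees the temporary object */
    return tp;
}

/* Used the same way as the hunk above, e.g.:
 *   v->ByteArrayType = TypeOfNewObj(Tcl_NewByteArrayObj(NULL, 0));
 *   v->IntType       = TypeOfNewObj(Tcl_NewIntObj(0));
 */
```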
*/ v->IntType = Tcl_GetObjType("int"); + if (v->IntType == NULL) { + Tcl_Obj *value = Tcl_NewIntObj(0); + v->IntType = value->typePtr; + Tcl_DecrRefCount(value); + } v->WideIntType = Tcl_GetObjType("wideInt"); v->BignumType = Tcl_GetObjType("bignum"); v->ListType = Tcl_GetObjType("list"); v->StringType = Tcl_GetObjType("string"); + v->UTF32StringType = Tcl_GetObjType("utf32string"); /* Delete the 'exit' command, which can screw things up */ Tcl_DeleteCommand(v->interp, "exit"); @@ -1124,14 +1151,6 @@ FromObj(TkappObject *tkapp, Tcl_Obj *value) return PyFloat_FromDouble(value->internalRep.doubleValue); } - if (value->typePtr == tkapp->IntType) { - long longValue; - if (Tcl_GetLongFromObj(interp, value, &longValue) == TCL_OK) - return PyLong_FromLong(longValue); - /* If there is an error in the long conversion, - fall through to wideInt handling. */ - } - if (value->typePtr == tkapp->IntType || value->typePtr == tkapp->WideIntType) { result = fromWideIntObj(tkapp, value); @@ -1176,17 +1195,12 @@ FromObj(TkappObject *tkapp, Tcl_Obj *value) return result; } - if (value->typePtr == tkapp->StringType) { + if (value->typePtr == tkapp->StringType || + value->typePtr == tkapp->UTF32StringType) + { return unicodeFromTclObj(value); } - if (tkapp->BooleanType == NULL && - strcmp(value->typePtr->name, "booleanString") == 0) { - /* booleanString type is not registered in Tcl */ - tkapp->BooleanType = value->typePtr; - return fromBoolean(tkapp, value); - } - if (tkapp->BignumType == NULL && strcmp(value->typePtr->name, "bignum") == 0) { /* bignum type is not registered in Tcl */ diff --git a/Modules/_typingmodule.c b/Modules/_typingmodule.c index 09fbb3c5e8b91d..37af00f3071e1d 100644 --- a/Modules/_typingmodule.c +++ b/Modules/_typingmodule.c @@ -63,6 +63,9 @@ _typing_exec(PyObject *m) if (PyModule_AddObjectRef(m, "TypeAliasType", (PyObject *)&_PyTypeAlias_Type) < 0) { return -1; } + if (PyType_Ready(&_PyNoDefault_Type) < 0) { + return -1; + } if (PyModule_AddObjectRef(m, "NoDefault", (PyObject *)&_Py_NoDefaultStruct) < 0) { return -1; } diff --git a/Modules/_winapi.c b/Modules/_winapi.c index cd5dd503abe61f..8794d568e92a36 100644 --- a/Modules/_winapi.c +++ b/Modules/_winapi.c @@ -224,7 +224,6 @@ create_converter('LPCVOID', '" F_POINTER "') create_converter('BOOL', 'i') # F_BOOL used previously (always 'i') create_converter('DWORD', 'k') # F_DWORD is always "k" (which is much shorter) -create_converter('LPCTSTR', 's') create_converter('UINT', 'I') # F_UINT used previously (always 'I') class LPCWSTR_converter(Py_UNICODE_converter): @@ -259,7 +258,7 @@ class LPVOID_return_converter(CReturnConverter): data.return_conversion.append( 'return_value = HANDLE_TO_PYNUM(_return_value);\n') [python start generated code]*/ -/*[python end generated code: output=da39a3ee5e6b4b0d input=ef52a757a1830d92]*/ +/*[python end generated code: output=da39a3ee5e6b4b0d input=da0a4db751936ee7]*/ #include "clinic/_winapi.c.h" @@ -530,7 +529,7 @@ _winapi_CreateFile_impl(PyObject *module, LPCWSTR file_name, { HANDLE handle; - if (PySys_Audit("_winapi.CreateFile", "uIIII", + if (PySys_Audit("_winapi.CreateFile", "ukkkk", file_name, desired_access, share_mode, creation_disposition, flags_and_attributes) < 0) { return INVALID_HANDLE_VALUE; @@ -777,7 +776,7 @@ _winapi_CreateMutexW_impl(PyObject *module, /*[clinic input] _winapi.CreateNamedPipe -> HANDLE - name: LPCTSTR + name: LPCWSTR open_mode: DWORD pipe_mode: DWORD max_instances: DWORD @@ -789,25 +788,25 @@ _winapi.CreateNamedPipe -> HANDLE [clinic start generated code]*/ static 
HANDLE -_winapi_CreateNamedPipe_impl(PyObject *module, LPCTSTR name, DWORD open_mode, +_winapi_CreateNamedPipe_impl(PyObject *module, LPCWSTR name, DWORD open_mode, DWORD pipe_mode, DWORD max_instances, DWORD out_buffer_size, DWORD in_buffer_size, DWORD default_timeout, LPSECURITY_ATTRIBUTES security_attributes) -/*[clinic end generated code: output=80f8c07346a94fbc input=5a73530b84d8bc37]*/ +/*[clinic end generated code: output=7d6fde93227680ba input=5bd4e4a55639ee02]*/ { HANDLE handle; - if (PySys_Audit("_winapi.CreateNamedPipe", "uII", + if (PySys_Audit("_winapi.CreateNamedPipe", "ukk", name, open_mode, pipe_mode) < 0) { return INVALID_HANDLE_VALUE; } Py_BEGIN_ALLOW_THREADS - handle = CreateNamedPipe(name, open_mode, pipe_mode, - max_instances, out_buffer_size, - in_buffer_size, default_timeout, - security_attributes); + handle = CreateNamedPipeW(name, open_mode, pipe_mode, + max_instances, out_buffer_size, + in_buffer_size, default_timeout, + security_attributes); Py_END_ALLOW_THREADS if (handle == INVALID_HANDLE_VALUE) @@ -1790,7 +1789,7 @@ _winapi_OpenEventW_impl(PyObject *module, DWORD desired_access, { HANDLE handle; - if (PySys_Audit("_winapi.OpenEventW", "Iu", desired_access, name) < 0) { + if (PySys_Audit("_winapi.OpenEventW", "ku", desired_access, name) < 0) { return INVALID_HANDLE_VALUE; } @@ -1821,7 +1820,7 @@ _winapi_OpenMutexW_impl(PyObject *module, DWORD desired_access, { HANDLE handle; - if (PySys_Audit("_winapi.OpenMutexW", "Iu", desired_access, name) < 0) { + if (PySys_Audit("_winapi.OpenMutexW", "ku", desired_access, name) < 0) { return INVALID_HANDLE_VALUE; } @@ -1882,7 +1881,7 @@ _winapi_OpenProcess_impl(PyObject *module, DWORD desired_access, { HANDLE handle; - if (PySys_Audit("_winapi.OpenProcess", "II", + if (PySys_Audit("_winapi.OpenProcess", "kk", process_id, desired_access) < 0) { return INVALID_HANDLE_VALUE; } @@ -2236,19 +2235,19 @@ _winapi_VirtualQuerySize_impl(PyObject *module, LPCVOID address) /*[clinic input] _winapi.WaitNamedPipe - name: LPCTSTR + name: LPCWSTR timeout: DWORD / [clinic start generated code]*/ static PyObject * -_winapi_WaitNamedPipe_impl(PyObject *module, LPCTSTR name, DWORD timeout) -/*[clinic end generated code: output=c2866f4439b1fe38 input=36fc781291b1862c]*/ +_winapi_WaitNamedPipe_impl(PyObject *module, LPCWSTR name, DWORD timeout) +/*[clinic end generated code: output=e161e2e630b3e9c2 input=099a4746544488fa]*/ { BOOL success; Py_BEGIN_ALLOW_THREADS - success = WaitNamedPipe(name, timeout); + success = WaitNamedPipeW(name, timeout); Py_END_ALLOW_THREADS if (!success) @@ -2917,7 +2916,7 @@ _winapi_CopyFile2_impl(PyObject *module, LPCWSTR existing_file_name, HRESULT hr; COPYFILE2_EXTENDED_PARAMETERS params = { sizeof(COPYFILE2_EXTENDED_PARAMETERS) }; - if (PySys_Audit("_winapi.CopyFile2", "uuI", + if (PySys_Audit("_winapi.CopyFile2", "uuk", existing_file_name, new_file_name, flags) < 0) { return NULL; } diff --git a/Modules/clinic/_curses_panel.c.h b/Modules/clinic/_curses_panel.c.h index 457f71370afda9..c8788c461f745c 100644 --- a/Modules/clinic/_curses_panel.c.h +++ b/Modules/clinic/_curses_panel.c.h @@ -2,6 +2,9 @@ preserve [clinic start generated code]*/ +#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) +# include "pycore_runtime.h" // _Py_SINGLETON() +#endif #include "pycore_modsupport.h" // _PyArg_UnpackKeywords() PyDoc_STRVAR(_curses_panel_panel_bottom__doc__, @@ -418,4 +421,4 @@ _curses_panel_update_panels(PyObject *module, PyObject *Py_UNUSED(ignored)) { return _curses_panel_update_panels_impl(module); } 
-/*[clinic end generated code: output=7bac14e9a1194c87 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=18dc5571174c7189 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_dbmmodule.c.h b/Modules/clinic/_dbmmodule.c.h index d06271e18a49b2..4379b433db3738 100644 --- a/Modules/clinic/_dbmmodule.c.h +++ b/Modules/clinic/_dbmmodule.c.h @@ -2,6 +2,9 @@ preserve [clinic start generated code]*/ +#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) +# include "pycore_runtime.h" // _Py_SINGLETON() +#endif #include "pycore_modsupport.h" // _PyArg_UnpackKeywords() PyDoc_STRVAR(_dbm_dbm_close__doc__, @@ -218,4 +221,4 @@ dbmopen(PyObject *module, PyObject *const *args, Py_ssize_t nargs) exit: return return_value; } -/*[clinic end generated code: output=743ce0cea116747e input=a9049054013a1b77]*/ +/*[clinic end generated code: output=f7d9a87d80a64278 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_elementtree.c.h b/Modules/clinic/_elementtree.c.h index 10b2dd1c15f7fd..1a5a820d1f00b5 100644 --- a/Modules/clinic/_elementtree.c.h +++ b/Modules/clinic/_elementtree.c.h @@ -4,7 +4,7 @@ preserve #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) # include "pycore_gc.h" // PyGC_Head -# include "pycore_runtime.h" // _Py_ID() +# include "pycore_runtime.h" // _Py_SINGLETON() #endif #include "pycore_abstract.h" // _PyNumber_Index() #include "pycore_modsupport.h" // _PyArg_UnpackKeywords() @@ -1236,4 +1236,4 @@ _elementtree_XMLParser__setevents(XMLParserObject *self, PyObject *const *args, exit: return return_value; } -/*[clinic end generated code: output=aed9f53eeb0404e0 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=bd28eba33d9c1f25 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_gdbmmodule.c.h b/Modules/clinic/_gdbmmodule.c.h index 626e4678809d4f..bbf4365114c0aa 100644 --- a/Modules/clinic/_gdbmmodule.c.h +++ b/Modules/clinic/_gdbmmodule.c.h @@ -2,6 +2,9 @@ preserve [clinic start generated code]*/ +#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) +# include "pycore_runtime.h" // _Py_SINGLETON() +#endif #include "pycore_modsupport.h" // _PyArg_CheckPositional() PyDoc_STRVAR(_gdbm_gdbm_get__doc__, @@ -340,4 +343,4 @@ dbmopen(PyObject *module, PyObject *const *args, Py_ssize_t nargs) exit: return return_value; } -/*[clinic end generated code: output=6b4c19905ac9967d input=a9049054013a1b77]*/ +/*[clinic end generated code: output=07bdeb4a8ecb328e input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_pickle.c.h b/Modules/clinic/_pickle.c.h index 5a6ae7be6b6ea7..693c7d59e9d7a6 100644 --- a/Modules/clinic/_pickle.c.h +++ b/Modules/clinic/_pickle.c.h @@ -4,7 +4,7 @@ preserve #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) # include "pycore_gc.h" // PyGC_Head -# include "pycore_runtime.h" // _Py_ID() +# include "pycore_runtime.h" // _Py_SINGLETON() #endif #include "pycore_modsupport.h" // _PyArg_UnpackKeywords() @@ -1077,4 +1077,4 @@ _pickle_loads(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObjec exit: return return_value; } -/*[clinic end generated code: output=bd63c85a8737b0aa input=a9049054013a1b77]*/ +/*[clinic end generated code: output=c7dd60d20ee4895f input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_testinternalcapi.c.h b/Modules/clinic/_testinternalcapi.c.h index a61858561d5ef8..dcca9ecf735723 100644 --- a/Modules/clinic/_testinternalcapi.c.h +++ b/Modules/clinic/_testinternalcapi.c.h @@ -300,4 +300,64 @@ _testinternalcapi_test_long_numbits(PyObject *module, PyObject *Py_UNUSED(ignore { return 
_testinternalcapi_test_long_numbits_impl(module); } -/*[clinic end generated code: output=9804015d77d36c77 input=a9049054013a1b77]*/ + +PyDoc_STRVAR(gh_119213_getargs__doc__, +"gh_119213_getargs($module, /, spam=None)\n" +"--\n" +"\n" +"Test _PyArg_Parser.kwtuple"); + +#define GH_119213_GETARGS_METHODDEF \ + {"gh_119213_getargs", _PyCFunction_CAST(gh_119213_getargs), METH_FASTCALL|METH_KEYWORDS, gh_119213_getargs__doc__}, + +static PyObject * +gh_119213_getargs_impl(PyObject *module, PyObject *spam); + +static PyObject * +gh_119213_getargs(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(spam), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"spam", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "gh_119213_getargs", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; + Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0; + PyObject *spam = Py_None; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf); + if (!args) { + goto exit; + } + if (!noptargs) { + goto skip_optional_pos; + } + spam = args[0]; +skip_optional_pos: + return_value = gh_119213_getargs_impl(module, spam); + +exit: + return return_value; +} +/*[clinic end generated code: output=4d0770a1c20fbf40 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_winapi.c.h b/Modules/clinic/_winapi.c.h index 9acb2dc4fe7eba..b0c54fc809f4c1 100644 --- a/Modules/clinic/_winapi.c.h +++ b/Modules/clinic/_winapi.c.h @@ -445,7 +445,7 @@ PyDoc_STRVAR(_winapi_CreateNamedPipe__doc__, {"CreateNamedPipe", _PyCFunction_CAST(_winapi_CreateNamedPipe), METH_FASTCALL, _winapi_CreateNamedPipe__doc__}, static HANDLE -_winapi_CreateNamedPipe_impl(PyObject *module, LPCTSTR name, DWORD open_mode, +_winapi_CreateNamedPipe_impl(PyObject *module, LPCWSTR name, DWORD open_mode, DWORD pipe_mode, DWORD max_instances, DWORD out_buffer_size, DWORD in_buffer_size, DWORD default_timeout, @@ -455,7 +455,7 @@ static PyObject * _winapi_CreateNamedPipe(PyObject *module, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; - LPCTSTR name; + LPCWSTR name = NULL; DWORD open_mode; DWORD pipe_mode; DWORD max_instances; @@ -465,8 +465,8 @@ _winapi_CreateNamedPipe(PyObject *module, PyObject *const *args, Py_ssize_t narg LPSECURITY_ATTRIBUTES security_attributes; HANDLE _return_value; - if (!_PyArg_ParseStack(args, nargs, "skkkkkk" F_POINTER ":CreateNamedPipe", - &name, &open_mode, &pipe_mode, &max_instances, &out_buffer_size, &in_buffer_size, &default_timeout, &security_attributes)) { + if (!_PyArg_ParseStack(args, nargs, "O&kkkkkk" F_POINTER ":CreateNamedPipe", + _PyUnicode_WideCharString_Converter, &name, &open_mode, &pipe_mode, &max_instances, &out_buffer_size, &in_buffer_size, &default_timeout, &security_attributes)) { goto exit; } _return_value = _winapi_CreateNamedPipe_impl(module, name, open_mode, pipe_mode, max_instances, out_buffer_size, in_buffer_size, default_timeout, security_attributes); @@ -479,6 +479,9 @@ 
_winapi_CreateNamedPipe(PyObject *module, PyObject *const *args, Py_ssize_t narg return_value = HANDLE_TO_PYNUM(_return_value); exit: + /* Cleanup for name */ + PyMem_Free((void *)name); + return return_value; } @@ -1660,22 +1663,25 @@ PyDoc_STRVAR(_winapi_WaitNamedPipe__doc__, {"WaitNamedPipe", _PyCFunction_CAST(_winapi_WaitNamedPipe), METH_FASTCALL, _winapi_WaitNamedPipe__doc__}, static PyObject * -_winapi_WaitNamedPipe_impl(PyObject *module, LPCTSTR name, DWORD timeout); +_winapi_WaitNamedPipe_impl(PyObject *module, LPCWSTR name, DWORD timeout); static PyObject * _winapi_WaitNamedPipe(PyObject *module, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; - LPCTSTR name; + LPCWSTR name = NULL; DWORD timeout; - if (!_PyArg_ParseStack(args, nargs, "sk:WaitNamedPipe", - &name, &timeout)) { + if (!_PyArg_ParseStack(args, nargs, "O&k:WaitNamedPipe", + _PyUnicode_WideCharString_Converter, &name, &timeout)) { goto exit; } return_value = _winapi_WaitNamedPipe_impl(module, name, timeout); exit: + /* Cleanup for name */ + PyMem_Free((void *)name); + return return_value; } @@ -2118,4 +2124,4 @@ _winapi_CopyFile2(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyO return return_value; } -/*[clinic end generated code: output=ed94a2482ede3744 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=2304c62187a90140 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/arraymodule.c.h b/Modules/clinic/arraymodule.c.h index 60a03fe012550e..1046355dc38f56 100644 --- a/Modules/clinic/arraymodule.c.h +++ b/Modules/clinic/arraymodule.c.h @@ -2,6 +2,9 @@ preserve [clinic start generated code]*/ +#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) +# include "pycore_runtime.h" // _Py_SINGLETON() +#endif #include "pycore_abstract.h" // _PyNumber_Index() #include "pycore_modsupport.h" // _PyArg_CheckPositional() @@ -685,4 +688,4 @@ PyDoc_STRVAR(array_arrayiterator___setstate____doc__, #define ARRAY_ARRAYITERATOR___SETSTATE___METHODDEF \ {"__setstate__", (PyCFunction)array_arrayiterator___setstate__, METH_O, array_arrayiterator___setstate____doc__}, -/*[clinic end generated code: output=52c55d9b1d026c1c input=a9049054013a1b77]*/ +/*[clinic end generated code: output=f675e0d433ae34b6 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/posixmodule.c.h b/Modules/clinic/posixmodule.c.h index a0d1f3238a6733..69fc178331c09c 100644 --- a/Modules/clinic/posixmodule.c.h +++ b/Modules/clinic/posixmodule.c.h @@ -72,7 +72,7 @@ os_stat(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwn #undef KWTUPLE PyObject *argsbuf[3]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; - path_t path = PATH_T_INITIALIZE("stat", "path", 0, 1); + path_t path = PATH_T_INITIALIZE_P("stat", "path", 0, 0, 0, 1); int dir_fd = DEFAULT_DIR_FD; int follow_symlinks = 1; @@ -154,7 +154,7 @@ os_lstat(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw #undef KWTUPLE PyObject *argsbuf[2]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; - path_t path = PATH_T_INITIALIZE("lstat", "path", 0, 0); + path_t path = PATH_T_INITIALIZE_P("lstat", "path", 0, 0, 0, 0); int dir_fd = DEFAULT_DIR_FD; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); @@ -250,7 +250,7 @@ os_access(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *k #undef KWTUPLE PyObject *argsbuf[5]; Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 2; - path_t path = PATH_T_INITIALIZE("access", "path", 0, 0); + path_t path = PATH_T_INITIALIZE_P("access", "path", 0, 0, 0, 0); int mode; int dir_fd = DEFAULT_DIR_FD; int effective_ids = 0; @@ -409,7 +409,7 @@ os_chdir(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw }; #undef KWTUPLE PyObject *argsbuf[1]; - path_t path = PATH_T_INITIALIZE("chdir", "path", 0, PATH_HAVE_FCHDIR); + path_t path = PATH_T_INITIALIZE_P("chdir", "path", 0, 0, 0, PATH_HAVE_FCHDIR); args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); if (!args) { @@ -560,7 +560,7 @@ os_chmod(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw #undef KWTUPLE PyObject *argsbuf[4]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 2; - path_t path = PATH_T_INITIALIZE("chmod", "path", 0, PATH_HAVE_FCHMOD); + path_t path = PATH_T_INITIALIZE_P("chmod", "path", 0, 0, 0, PATH_HAVE_FCHMOD); int mode; int dir_fd = DEFAULT_DIR_FD; int follow_symlinks = CHMOD_DEFAULT_FOLLOW_SYMLINKS; @@ -725,7 +725,7 @@ os_lchmod(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *k }; #undef KWTUPLE PyObject *argsbuf[2]; - path_t path = PATH_T_INITIALIZE("lchmod", "path", 0, 0); + path_t path = PATH_T_INITIALIZE_P("lchmod", "path", 0, 0, 0, 0); int mode; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 2, 0, argsbuf); @@ -802,7 +802,7 @@ os_chflags(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject * #undef KWTUPLE PyObject *argsbuf[3]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 2; - path_t path = PATH_T_INITIALIZE("chflags", "path", 0, 0); + path_t path = PATH_T_INITIALIZE_P("chflags", "path", 0, 0, 0, 0); unsigned long flags; int follow_symlinks = 1; @@ -884,7 +884,7 @@ os_lchflags(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject }; #undef KWTUPLE PyObject *argsbuf[2]; - path_t path = PATH_T_INITIALIZE("lchflags", "path", 0, 0); + path_t path = PATH_T_INITIALIZE_P("lchflags", "path", 0, 0, 0, 0); unsigned long flags; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 2, 0, argsbuf); @@ -954,7 +954,7 @@ os_chroot(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *k }; #undef KWTUPLE PyObject *argsbuf[1]; - path_t path = PATH_T_INITIALIZE("chroot", "path", 0, 0); + path_t path = PATH_T_INITIALIZE_P("chroot", "path", 0, 0, 0, 0); args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); if (!args) { @@ -1190,7 +1190,7 @@ os_chown(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw #undef KWTUPLE PyObject *argsbuf[5]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 3; - path_t path = PATH_T_INITIALIZE("chown", "path", 0, PATH_HAVE_FCHOWN); + path_t path = PATH_T_INITIALIZE_P("chown", "path", 0, 0, 0, PATH_HAVE_FCHOWN); uid_t uid; gid_t gid; int dir_fd = DEFAULT_DIR_FD; @@ -1355,7 +1355,7 @@ os_lchown(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *k }; #undef KWTUPLE PyObject *argsbuf[3]; - path_t path = PATH_T_INITIALIZE("lchown", "path", 0, 0); + path_t path = PATH_T_INITIALIZE_P("lchown", "path", 0, 0, 0, 0); uid_t uid; gid_t gid; @@ -1476,8 +1476,8 @@ os_link(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwn #undef KWTUPLE PyObject *argsbuf[5]; Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 2; - path_t src = PATH_T_INITIALIZE("link", "src", 0, 0); - path_t dst = PATH_T_INITIALIZE("link", "dst", 0, 0); + path_t src = PATH_T_INITIALIZE_P("link", "src", 0, 0, 0, 0); + path_t dst = PATH_T_INITIALIZE_P("link", "dst", 0, 0, 0, 0); int src_dir_fd = DEFAULT_DIR_FD; int dst_dir_fd = DEFAULT_DIR_FD; int follow_symlinks = 1; @@ -1583,7 +1583,7 @@ os_listdir(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject * #undef KWTUPLE PyObject *argsbuf[1]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0; - path_t path = PATH_T_INITIALIZE("listdir", "path", 1, PATH_HAVE_FDOPENDIR); + path_t path = PATH_T_INITIALIZE_P("listdir", "path", 1, 0, 0, PATH_HAVE_FDOPENDIR); args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf); if (!args) { @@ -1699,7 +1699,7 @@ os_listmounts(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObjec }; #undef KWTUPLE PyObject *argsbuf[1]; - path_t volume = PATH_T_INITIALIZE("listmounts", "volume", 0, 0); + path_t volume = PATH_T_INITIALIZE_P("listmounts", "volume", 0, 0, 0, 0); args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); if (!args) { @@ -1763,7 +1763,7 @@ os__path_isdevdrive(PyObject *module, PyObject *const *args, Py_ssize_t nargs, P }; #undef KWTUPLE PyObject *argsbuf[1]; - path_t path = PATH_T_INITIALIZE("_path_isdevdrive", "path", 0, 0); + path_t path = PATH_T_INITIALIZE_P("_path_isdevdrive", "path", 0, 0, 0, 0); args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); if (!args) { @@ -1800,7 +1800,7 @@ static PyObject * os__getfullpathname(PyObject *module, PyObject *arg) { PyObject *return_value = NULL; - path_t path = PATH_T_INITIALIZE("_getfullpathname", "path", 0, 0); + path_t path = PATH_T_INITIALIZE_P("_getfullpathname", "path", 0, 0, 0, 0); if (!path_converter(arg, &path)) { goto exit; @@ -1834,7 +1834,7 @@ static PyObject * os__getfinalpathname(PyObject *module, PyObject *arg) { PyObject *return_value = NULL; - path_t path = PATH_T_INITIALIZE("_getfinalpathname", "path", 0, 0); + path_t path = PATH_T_INITIALIZE_P("_getfinalpathname", "path", 0, 0, 0, 0); if (!path_converter(arg, &path)) { goto exit; @@ -1868,7 +1868,7 @@ static PyObject * os__findfirstfile(PyObject *module, PyObject *arg) { PyObject *return_value = NULL; - path_t path = PATH_T_INITIALIZE("_findfirstfile", "path", 0, 0); + path_t path = PATH_T_INITIALIZE_P("_findfirstfile", "path", 0, 0, 0, 0); if (!path_converter(arg, &path)) { goto exit; @@ -1928,7 +1928,7 @@ os__getvolumepathname(PyObject *module, PyObject *const *args, Py_ssize_t nargs, }; #undef KWTUPLE PyObject *argsbuf[1]; - path_t path = PATH_T_INITIALIZE("_getvolumepathname", "path", 0, 0); + path_t path = PATH_T_INITIALIZE_P("_getvolumepathname", "path", 0, 0, 0, 0); args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); if (!args) { @@ -1992,7 +1992,7 @@ os__path_splitroot(PyObject *module, PyObject *const *args, Py_ssize_t nargs, Py }; #undef KWTUPLE PyObject *argsbuf[1]; - path_t path = PATH_T_INITIALIZE("_path_splitroot", "path", 0, 0); + path_t path = PATH_T_INITIALIZE_P("_path_splitroot", "path", 0, 0, 0, 0); args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); if (!args) { @@ -2014,6 +2014,84 @@ os__path_splitroot(PyObject *module, PyObject *const *args, Py_ssize_t nargs, Py #if defined(MS_WINDOWS) +PyDoc_STRVAR(os__path_exists__doc__, +"_path_exists($module, path, /)\n" +"--\n" 
+"\n" +"Test whether a path exists. Returns False for broken symbolic links."); + +#define OS__PATH_EXISTS_METHODDEF \ + {"_path_exists", (PyCFunction)os__path_exists, METH_O, os__path_exists__doc__}, + +static int +os__path_exists_impl(PyObject *module, path_t *path); + +static PyObject * +os__path_exists(PyObject *module, PyObject *arg) +{ + PyObject *return_value = NULL; + path_t path = PATH_T_INITIALIZE_P("_path_exists", "path", 0, 0, 1, 1); + int _return_value; + + if (!path_converter(arg, &path)) { + goto exit; + } + _return_value = os__path_exists_impl(module, &path); + if ((_return_value == -1) && PyErr_Occurred()) { + goto exit; + } + return_value = PyBool_FromLong((long)_return_value); + +exit: + /* Cleanup for path */ + path_cleanup(&path); + + return return_value; +} + +#endif /* defined(MS_WINDOWS) */ + +#if defined(MS_WINDOWS) + +PyDoc_STRVAR(os__path_lexists__doc__, +"_path_lexists($module, path, /)\n" +"--\n" +"\n" +"Test whether a path exists. Returns True for broken symbolic links."); + +#define OS__PATH_LEXISTS_METHODDEF \ + {"_path_lexists", (PyCFunction)os__path_lexists, METH_O, os__path_lexists__doc__}, + +static int +os__path_lexists_impl(PyObject *module, path_t *path); + +static PyObject * +os__path_lexists(PyObject *module, PyObject *arg) +{ + PyObject *return_value = NULL; + path_t path = PATH_T_INITIALIZE_P("_path_lexists", "path", 0, 0, 1, 1); + int _return_value; + + if (!path_converter(arg, &path)) { + goto exit; + } + _return_value = os__path_lexists_impl(module, &path); + if ((_return_value == -1) && PyErr_Occurred()) { + goto exit; + } + return_value = PyBool_FromLong((long)_return_value); + +exit: + /* Cleanup for path */ + path_cleanup(&path); + + return return_value; +} + +#endif /* defined(MS_WINDOWS) */ + +#if defined(MS_WINDOWS) + PyDoc_STRVAR(os__path_isdir__doc__, "_path_isdir($module, /, s)\n" "--\n" @@ -2023,8 +2101,8 @@ PyDoc_STRVAR(os__path_isdir__doc__, #define OS__PATH_ISDIR_METHODDEF \ {"_path_isdir", _PyCFunction_CAST(os__path_isdir), METH_FASTCALL|METH_KEYWORDS, os__path_isdir__doc__}, -static PyObject * -os__path_isdir_impl(PyObject *module, PyObject *s); +static int +os__path_isdir_impl(PyObject *module, path_t *path); static PyObject * os__path_isdir(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) @@ -2056,16 +2134,26 @@ os__path_isdir(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje }; #undef KWTUPLE PyObject *argsbuf[1]; - PyObject *s; + path_t path = PATH_T_INITIALIZE_P("_path_isdir", "path", 0, 0, 1, 1); + int _return_value; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); if (!args) { goto exit; } - s = args[0]; - return_value = os__path_isdir_impl(module, s); + if (!path_converter(args[0], &path)) { + goto exit; + } + _return_value = os__path_isdir_impl(module, &path); + if ((_return_value == -1) && PyErr_Occurred()) { + goto exit; + } + return_value = PyBool_FromLong((long)_return_value); exit: + /* Cleanup for path */ + path_cleanup(&path); + return return_value; } @@ -2082,8 +2170,8 @@ PyDoc_STRVAR(os__path_isfile__doc__, #define OS__PATH_ISFILE_METHODDEF \ {"_path_isfile", _PyCFunction_CAST(os__path_isfile), METH_FASTCALL|METH_KEYWORDS, os__path_isfile__doc__}, -static PyObject * -os__path_isfile_impl(PyObject *module, PyObject *path); +static int +os__path_isfile_impl(PyObject *module, path_t *path); static PyObject * os__path_isfile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) @@ -2115,16 +2203,26 @@ 
os__path_isfile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj }; #undef KWTUPLE PyObject *argsbuf[1]; - PyObject *path; + path_t path = PATH_T_INITIALIZE_P("_path_isfile", "path", 0, 0, 1, 1); + int _return_value; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); if (!args) { goto exit; } - path = args[0]; - return_value = os__path_isfile_impl(module, path); + if (!path_converter(args[0], &path)) { + goto exit; + } + _return_value = os__path_isfile_impl(module, &path); + if ((_return_value == -1) && PyErr_Occurred()) { + goto exit; + } + return_value = PyBool_FromLong((long)_return_value); exit: + /* Cleanup for path */ + path_cleanup(&path); + return return_value; } @@ -2132,20 +2230,20 @@ os__path_isfile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj #if defined(MS_WINDOWS) -PyDoc_STRVAR(os__path_exists__doc__, -"_path_exists($module, /, path)\n" +PyDoc_STRVAR(os__path_islink__doc__, +"_path_islink($module, /, path)\n" "--\n" "\n" -"Test whether a path exists. Returns False for broken symbolic links"); +"Test whether a path is a symbolic link"); -#define OS__PATH_EXISTS_METHODDEF \ - {"_path_exists", _PyCFunction_CAST(os__path_exists), METH_FASTCALL|METH_KEYWORDS, os__path_exists__doc__}, +#define OS__PATH_ISLINK_METHODDEF \ + {"_path_islink", _PyCFunction_CAST(os__path_islink), METH_FASTCALL|METH_KEYWORDS, os__path_islink__doc__}, -static PyObject * -os__path_exists_impl(PyObject *module, PyObject *path); +static int +os__path_islink_impl(PyObject *module, path_t *path); static PyObject * -os__path_exists(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +os__path_islink(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -2169,21 +2267,31 @@ os__path_exists(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj static const char * const _keywords[] = {"path", NULL}; static _PyArg_Parser _parser = { .keywords = _keywords, - .fname = "_path_exists", + .fname = "_path_islink", .kwtuple = KWTUPLE, }; #undef KWTUPLE PyObject *argsbuf[1]; - PyObject *path; + path_t path = PATH_T_INITIALIZE_P("_path_islink", "path", 0, 0, 1, 1); + int _return_value; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); if (!args) { goto exit; } - path = args[0]; - return_value = os__path_exists_impl(module, path); + if (!path_converter(args[0], &path)) { + goto exit; + } + _return_value = os__path_islink_impl(module, &path); + if ((_return_value == -1) && PyErr_Occurred()) { + goto exit; + } + return_value = PyBool_FromLong((long)_return_value); exit: + /* Cleanup for path */ + path_cleanup(&path); + return return_value; } @@ -2191,20 +2299,20 @@ os__path_exists(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj #if defined(MS_WINDOWS) -PyDoc_STRVAR(os__path_islink__doc__, -"_path_islink($module, /, path)\n" +PyDoc_STRVAR(os__path_isjunction__doc__, +"_path_isjunction($module, /, path)\n" "--\n" "\n" -"Test whether a path is a symbolic link"); +"Test whether a path is a junction"); -#define OS__PATH_ISLINK_METHODDEF \ - {"_path_islink", _PyCFunction_CAST(os__path_islink), METH_FASTCALL|METH_KEYWORDS, os__path_islink__doc__}, +#define OS__PATH_ISJUNCTION_METHODDEF \ + {"_path_isjunction", _PyCFunction_CAST(os__path_isjunction), METH_FASTCALL|METH_KEYWORDS, os__path_isjunction__doc__}, -static PyObject * -os__path_islink_impl(PyObject 
*module, PyObject *path); +static int +os__path_isjunction_impl(PyObject *module, path_t *path); static PyObject * -os__path_islink(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +os__path_isjunction(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -2228,21 +2336,31 @@ os__path_islink(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj static const char * const _keywords[] = {"path", NULL}; static _PyArg_Parser _parser = { .keywords = _keywords, - .fname = "_path_islink", + .fname = "_path_isjunction", .kwtuple = KWTUPLE, }; #undef KWTUPLE PyObject *argsbuf[1]; - PyObject *path; + path_t path = PATH_T_INITIALIZE_P("_path_isjunction", "path", 0, 0, 1, 1); + int _return_value; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); if (!args) { goto exit; } - path = args[0]; - return_value = os__path_islink_impl(module, path); + if (!path_converter(args[0], &path)) { + goto exit; + } + _return_value = os__path_isjunction_impl(module, &path); + if ((_return_value == -1) && PyErr_Occurred()) { + goto exit; + } + return_value = PyBool_FromLong((long)_return_value); exit: + /* Cleanup for path */ + path_cleanup(&path); + return return_value; } @@ -2251,13 +2369,16 @@ os__path_islink(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj PyDoc_STRVAR(os__path_splitroot_ex__doc__, "_path_splitroot_ex($module, /, path)\n" "--\n" -"\n"); +"\n" +"Split a pathname into drive, root and tail.\n" +"\n" +"The tail contains anything after the root."); #define OS__PATH_SPLITROOT_EX_METHODDEF \ {"_path_splitroot_ex", _PyCFunction_CAST(os__path_splitroot_ex), METH_FASTCALL|METH_KEYWORDS, os__path_splitroot_ex__doc__}, static PyObject * -os__path_splitroot_ex_impl(PyObject *module, PyObject *path); +os__path_splitroot_ex_impl(PyObject *module, path_t *path); static PyObject * os__path_splitroot_ex(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) @@ -2289,20 +2410,21 @@ os__path_splitroot_ex(PyObject *module, PyObject *const *args, Py_ssize_t nargs, }; #undef KWTUPLE PyObject *argsbuf[1]; - PyObject *path; + path_t path = PATH_T_INITIALIZE("_path_splitroot_ex", "path", 0, 1, 1, 0, 0); args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); if (!args) { goto exit; } - if (!PyUnicode_Check(args[0])) { - _PyArg_BadArgument("_path_splitroot_ex", "argument 'path'", "str", args[0]); + if (!path_converter(args[0], &path)) { goto exit; } - path = args[0]; - return_value = os__path_splitroot_ex_impl(module, path); + return_value = os__path_splitroot_ex_impl(module, &path); exit: + /* Cleanup for path */ + path_cleanup(&path); + return return_value; } @@ -2310,13 +2432,13 @@ PyDoc_STRVAR(os__path_normpath__doc__, "_path_normpath($module, /, path)\n" "--\n" "\n" -"Basic path normalization."); +"Normalize path, eliminating double slashes, etc."); #define OS__PATH_NORMPATH_METHODDEF \ {"_path_normpath", _PyCFunction_CAST(os__path_normpath), METH_FASTCALL|METH_KEYWORDS, os__path_normpath__doc__}, static PyObject * -os__path_normpath_impl(PyObject *module, PyObject *path); +os__path_normpath_impl(PyObject *module, path_t *path); static PyObject * os__path_normpath(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) @@ -2348,16 +2470,21 @@ os__path_normpath(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyO }; #undef KWTUPLE 
PyObject *argsbuf[1]; - PyObject *path; + path_t path = PATH_T_INITIALIZE("_path_normpath", "path", 0, 1, 1, 0, 0); args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); if (!args) { goto exit; } - path = args[0]; - return_value = os__path_normpath_impl(module, path); + if (!path_converter(args[0], &path)) { + goto exit; + } + return_value = os__path_normpath_impl(module, &path); exit: + /* Cleanup for path */ + path_cleanup(&path); + return return_value; } @@ -2412,7 +2539,7 @@ os_mkdir(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw #undef KWTUPLE PyObject *argsbuf[3]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; - path_t path = PATH_T_INITIALIZE("mkdir", "path", 0, 0); + path_t path = PATH_T_INITIALIZE_P("mkdir", "path", 0, 0, 0, 0); int mode = 511; int dir_fd = DEFAULT_DIR_FD; @@ -2673,8 +2800,8 @@ os_rename(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *k #undef KWTUPLE PyObject *argsbuf[4]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 2; - path_t src = PATH_T_INITIALIZE("rename", "src", 0, 0); - path_t dst = PATH_T_INITIALIZE("rename", "dst", 0, 0); + path_t src = PATH_T_INITIALIZE_P("rename", "src", 0, 0, 0, 0); + path_t dst = PATH_T_INITIALIZE_P("rename", "dst", 0, 0, 0, 0); int src_dir_fd = DEFAULT_DIR_FD; int dst_dir_fd = DEFAULT_DIR_FD; @@ -2764,8 +2891,8 @@ os_replace(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject * #undef KWTUPLE PyObject *argsbuf[4]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 2; - path_t src = PATH_T_INITIALIZE("replace", "src", 0, 0); - path_t dst = PATH_T_INITIALIZE("replace", "dst", 0, 0); + path_t src = PATH_T_INITIALIZE_P("replace", "src", 0, 0, 0, 0); + path_t dst = PATH_T_INITIALIZE_P("replace", "dst", 0, 0, 0, 0); int src_dir_fd = DEFAULT_DIR_FD; int dst_dir_fd = DEFAULT_DIR_FD; @@ -2853,7 +2980,7 @@ os_rmdir(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw #undef KWTUPLE PyObject *argsbuf[2]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; - path_t path = PATH_T_INITIALIZE("rmdir", "path", 0, 0); + path_t path = PATH_T_INITIALIZE_P("rmdir", "path", 0, 0, 0, 0); int dir_fd = DEFAULT_DIR_FD; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); @@ -3102,7 +3229,7 @@ os_unlink(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *k #undef KWTUPLE PyObject *argsbuf[2]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; - path_t path = PATH_T_INITIALIZE("unlink", "path", 0, 0); + path_t path = PATH_T_INITIALIZE_P("unlink", "path", 0, 0, 0, 0); int dir_fd = DEFAULT_DIR_FD; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); @@ -3176,7 +3303,7 @@ os_remove(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *k #undef KWTUPLE PyObject *argsbuf[2]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; - path_t path = PATH_T_INITIALIZE("remove", "path", 0, 0); + path_t path = PATH_T_INITIALIZE_P("remove", "path", 0, 0, 0, 0); int dir_fd = DEFAULT_DIR_FD; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); @@ -3294,7 +3421,7 @@ os_utime(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw #undef KWTUPLE PyObject *argsbuf[5]; Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 1; - path_t path = PATH_T_INITIALIZE("utime", "path", 0, PATH_UTIME_HAVE_FD); + path_t path = PATH_T_INITIALIZE_P("utime", "path", 0, 0, 0, PATH_UTIME_HAVE_FD); PyObject *times = Py_None; PyObject *ns = NULL; int dir_fd = DEFAULT_DIR_FD; @@ -3429,7 +3556,7 @@ static PyObject * os_execv(PyObject *module, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; - path_t path = PATH_T_INITIALIZE("execv", "path", 0, 0); + path_t path = PATH_T_INITIALIZE_P("execv", "path", 0, 0, 0, 0); PyObject *argv; if (!_PyArg_CheckPositional("execv", nargs, 2, 2)) { @@ -3501,7 +3628,7 @@ os_execve(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *k }; #undef KWTUPLE PyObject *argsbuf[3]; - path_t path = PATH_T_INITIALIZE("execve", "path", 0, PATH_HAVE_FEXECVE); + path_t path = PATH_T_INITIALIZE_P("execve", "path", 0, 0, 0, PATH_HAVE_FEXECVE); PyObject *argv; PyObject *env; @@ -3597,7 +3724,7 @@ os_posix_spawn(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje #undef KWTUPLE PyObject *argsbuf[10]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 3; - path_t path = PATH_T_INITIALIZE("posix_spawn", "path", 0, 0); + path_t path = PATH_T_INITIALIZE_P("posix_spawn", "path", 0, 0, 0, 0); PyObject *argv; PyObject *env; PyObject *file_actions = NULL; @@ -3747,7 +3874,7 @@ os_posix_spawnp(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj #undef KWTUPLE PyObject *argsbuf[10]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 3; - path_t path = PATH_T_INITIALIZE("posix_spawnp", "path", 0, 0); + path_t path = PATH_T_INITIALIZE_P("posix_spawnp", "path", 0, 0, 0, 0); PyObject *argv; PyObject *env; PyObject *file_actions = NULL; @@ -3851,7 +3978,7 @@ os_spawnv(PyObject *module, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int mode; - path_t path = PATH_T_INITIALIZE("spawnv", "path", 0, 0); + path_t path = PATH_T_INITIALIZE_P("spawnv", "path", 0, 0, 0, 0); PyObject *argv; if (!_PyArg_CheckPositional("spawnv", nargs, 3, 3)) { @@ -3905,7 +4032,7 @@ os_spawnve(PyObject *module, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int mode; - path_t path = PATH_T_INITIALIZE("spawnve", "path", 0, 0); + path_t path = PATH_T_INITIALIZE_P("spawnve", "path", 0, 0, 0, 0); PyObject *argv; PyObject *env; @@ -6081,7 +6208,7 @@ os_readlink(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject #undef KWTUPLE PyObject *argsbuf[2]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; - path_t path = PATH_T_INITIALIZE("readlink", "path", 0, 0); + path_t path = PATH_T_INITIALIZE_P("readlink", "path", 0, 0, 0, 0); int dir_fd = DEFAULT_DIR_FD; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); @@ -6165,8 +6292,8 @@ os_symlink(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject * #undef KWTUPLE PyObject *argsbuf[4]; Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 2; - path_t src = PATH_T_INITIALIZE("symlink", "src", 0, 0); - path_t dst = PATH_T_INITIALIZE("symlink", "dst", 0, 0); + path_t src = PATH_T_INITIALIZE_P("symlink", "src", 0, 0, 0, 0); + path_t dst = PATH_T_INITIALIZE_P("symlink", "dst", 0, 0, 0, 0); int target_is_directory = 0; int dir_fd = DEFAULT_DIR_FD; @@ -6808,7 +6935,7 @@ os_open(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwn #undef KWTUPLE PyObject *argsbuf[4]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 2; - path_t path = PATH_T_INITIALIZE("open", "path", 0, 0); + path_t path = PATH_T_INITIALIZE_P("open", "path", 0, 0, 0, 0); int flags; int mode = 511; int dir_fd = DEFAULT_DIR_FD; @@ -8396,7 +8523,7 @@ os_mkfifo(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *k #undef KWTUPLE PyObject *argsbuf[3]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; - path_t path = PATH_T_INITIALIZE("mkfifo", "path", 0, 0); + path_t path = PATH_T_INITIALIZE_P("mkfifo", "path", 0, 0, 0, 0); int mode = 438; int dir_fd = DEFAULT_DIR_FD; @@ -8496,7 +8623,7 @@ os_mknod(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw #undef KWTUPLE PyObject *argsbuf[4]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; - path_t path = PATH_T_INITIALIZE("mknod", "path", 0, 0); + path_t path = PATH_T_INITIALIZE_P("mknod", "path", 0, 0, 0, 0); int mode = 384; dev_t device = 0; int dir_fd = DEFAULT_DIR_FD; @@ -8558,7 +8685,7 @@ PyDoc_STRVAR(os_major__doc__, #define OS_MAJOR_METHODDEF \ {"major", (PyCFunction)os_major, METH_O, os_major__doc__}, -static unsigned int +static PyObject * os_major_impl(PyObject *module, dev_t device); static PyObject * @@ -8566,16 +8693,11 @@ os_major(PyObject *module, PyObject *arg) { PyObject *return_value = NULL; dev_t device; - unsigned int _return_value; if (!_Py_Dev_Converter(arg, &device)) { goto exit; } - _return_value = os_major_impl(module, device); - if ((_return_value == (unsigned int)-1) && PyErr_Occurred()) { - goto exit; - } - return_value = PyLong_FromUnsignedLong((unsigned long)_return_value); + return_value = os_major_impl(module, device); exit: return return_value; @@ -8594,7 +8716,7 @@ PyDoc_STRVAR(os_minor__doc__, #define OS_MINOR_METHODDEF \ {"minor", (PyCFunction)os_minor, METH_O, os_minor__doc__}, -static unsigned int +static PyObject * os_minor_impl(PyObject *module, dev_t device); static PyObject * @@ -8602,16 +8724,11 @@ os_minor(PyObject *module, PyObject *arg) { PyObject *return_value = NULL; dev_t device; - unsigned int _return_value; if (!_Py_Dev_Converter(arg, &device)) { goto exit; } - _return_value = os_minor_impl(module, device); - if ((_return_value == (unsigned int)-1) && PyErr_Occurred()) { - goto exit; - } - return_value = PyLong_FromUnsignedLong((unsigned long)_return_value); + return_value = os_minor_impl(module, device); exit: return return_value; @@ -8631,25 +8748,23 @@ PyDoc_STRVAR(os_makedev__doc__, {"makedev", _PyCFunction_CAST(os_makedev), METH_FASTCALL, os_makedev__doc__}, static dev_t -os_makedev_impl(PyObject *module, int major, int minor); +os_makedev_impl(PyObject *module, dev_t major, dev_t minor); static PyObject * os_makedev(PyObject *module, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; - int major; - int minor; + dev_t major; + dev_t minor; dev_t _return_value; if (!_PyArg_CheckPositional("makedev", nargs, 2, 2)) { goto exit; } - major = PyLong_AsInt(args[0]); - if (major == -1 && 
PyErr_Occurred()) { + if (!_Py_Dev_Converter(args[0], &major)) { goto exit; } - minor = PyLong_AsInt(args[1]); - if (minor == -1 && PyErr_Occurred()) { + if (!_Py_Dev_Converter(args[1], &minor)) { goto exit; } _return_value = os_makedev_impl(module, major, minor); @@ -8750,7 +8865,7 @@ os_truncate(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject }; #undef KWTUPLE PyObject *argsbuf[2]; - path_t path = PATH_T_INITIALIZE("truncate", "path", 0, PATH_HAVE_FTRUNCATE); + path_t path = PATH_T_INITIALIZE_P("truncate", "path", 0, 0, 0, PATH_HAVE_FTRUNCATE); Py_off_t length; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 2, 0, argsbuf); @@ -9649,7 +9764,7 @@ os_statvfs(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject * }; #undef KWTUPLE PyObject *argsbuf[1]; - path_t path = PATH_T_INITIALIZE("statvfs", "path", 0, PATH_HAVE_FSTATVFS); + path_t path = PATH_T_INITIALIZE_P("statvfs", "path", 0, 0, 0, PATH_HAVE_FSTATVFS); args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); if (!args) { @@ -9713,7 +9828,7 @@ os__getdiskusage(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyOb }; #undef KWTUPLE PyObject *argsbuf[1]; - path_t path = PATH_T_INITIALIZE("_getdiskusage", "path", 0, 0); + path_t path = PATH_T_INITIALIZE_P("_getdiskusage", "path", 0, 0, 0, 0); args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); if (!args) { @@ -9827,7 +9942,7 @@ os_pathconf(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject }; #undef KWTUPLE PyObject *argsbuf[2]; - path_t path = PATH_T_INITIALIZE("pathconf", "path", 0, PATH_HAVE_FPATHCONF); + path_t path = PATH_T_INITIALIZE_P("pathconf", "path", 0, 0, 0, PATH_HAVE_FPATHCONF); int name; long _return_value; @@ -10017,10 +10132,10 @@ os_startfile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject #undef KWTUPLE PyObject *argsbuf[5]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; - path_t filepath = PATH_T_INITIALIZE("startfile", "filepath", 0, 0); + path_t filepath = PATH_T_INITIALIZE_P("startfile", "filepath", 0, 0, 0, 0); const wchar_t *operation = NULL; const wchar_t *arguments = NULL; - path_t cwd = PATH_T_INITIALIZE("startfile", "cwd", 1, 0); + path_t cwd = PATH_T_INITIALIZE_P("startfile", "cwd", 1, 0, 0, 0); int show_cmd = 1; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 5, 0, argsbuf); @@ -10355,8 +10470,8 @@ os_getxattr(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject #undef KWTUPLE PyObject *argsbuf[3]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 2; - path_t path = PATH_T_INITIALIZE("getxattr", "path", 0, 1); - path_t attribute = PATH_T_INITIALIZE("getxattr", "attribute", 0, 0); + path_t path = PATH_T_INITIALIZE_P("getxattr", "path", 0, 0, 0, 1); + path_t attribute = PATH_T_INITIALIZE_P("getxattr", "attribute", 0, 0, 0, 0); int follow_symlinks = 1; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 2, 0, argsbuf); @@ -10442,8 +10557,8 @@ os_setxattr(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject #undef KWTUPLE PyObject *argsbuf[5]; Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 3; - path_t path = PATH_T_INITIALIZE("setxattr", "path", 0, 1); - path_t attribute = PATH_T_INITIALIZE("setxattr", "attribute", 0, 0); + path_t path = PATH_T_INITIALIZE_P("setxattr", "path", 0, 0, 0, 1); + path_t attribute = PATH_T_INITIALIZE_P("setxattr", "attribute", 0, 0, 0, 0); Py_buffer value = {NULL, NULL}; int flags = 0; int follow_symlinks = 1; @@ -10550,8 +10665,8 @@ os_removexattr(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje #undef KWTUPLE PyObject *argsbuf[3]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 2; - path_t path = PATH_T_INITIALIZE("removexattr", "path", 0, 1); - path_t attribute = PATH_T_INITIALIZE("removexattr", "attribute", 0, 0); + path_t path = PATH_T_INITIALIZE_P("removexattr", "path", 0, 0, 0, 1); + path_t attribute = PATH_T_INITIALIZE_P("removexattr", "attribute", 0, 0, 0, 0); int follow_symlinks = 1; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 2, 0, argsbuf); @@ -10636,7 +10751,7 @@ os_listxattr(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject #undef KWTUPLE PyObject *argsbuf[2]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0; - path_t path = PATH_T_INITIALIZE("listxattr", "path", 1, 1); + path_t path = PATH_T_INITIALIZE_P("listxattr", "path", 1, 0, 0, 1); int follow_symlinks = 1; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf); @@ -11613,7 +11728,7 @@ os_scandir(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject * #undef KWTUPLE PyObject *argsbuf[1]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0; - path_t path = PATH_T_INITIALIZE("scandir", "path", 1, PATH_HAVE_FDOPENDIR); + path_t path = PATH_T_INITIALIZE_P("scandir", "path", 1, 0, 0, PATH_HAVE_FDOPENDIR); args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf); if (!args) { @@ -11825,7 +11940,7 @@ os__add_dll_directory(PyObject *module, PyObject *const *args, Py_ssize_t nargs, }; #undef KWTUPLE PyObject *argsbuf[1]; - path_t path = PATH_T_INITIALIZE("_add_dll_directory", "path", 0, 0); + path_t path = PATH_T_INITIALIZE_P("_add_dll_directory", "path", 0, 0, 0, 0); args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); if (!args) { @@ -12001,6 +12116,42 @@ os__supports_virtual_terminal(PyObject *module, PyObject *Py_UNUSED(ignored)) #endif /* defined(MS_WINDOWS) */ +PyDoc_STRVAR(os__inputhook__doc__, +"_inputhook($module, /)\n" +"--\n" +"\n" +"Calls PyOS_CallInputHook droppong the GIL first"); + +#define OS__INPUTHOOK_METHODDEF \ + {"_inputhook", (PyCFunction)os__inputhook, METH_NOARGS, os__inputhook__doc__}, + +static PyObject * +os__inputhook_impl(PyObject *module); + +static PyObject * +os__inputhook(PyObject *module, PyObject *Py_UNUSED(ignored)) +{ + return os__inputhook_impl(module); +} + +PyDoc_STRVAR(os__is_inputhook_installed__doc__, +"_is_inputhook_installed($module, /)\n" +"--\n" +"\n" +"Checks if PyOS_CallInputHook is set"); + +#define OS__IS_INPUTHOOK_INSTALLED_METHODDEF \ + {"_is_inputhook_installed", (PyCFunction)os__is_inputhook_installed, METH_NOARGS, os__is_inputhook_installed__doc__}, + +static PyObject * +os__is_inputhook_installed_impl(PyObject *module); + +static PyObject * +os__is_inputhook_installed(PyObject *module, PyObject *Py_UNUSED(ignored)) +{ + return os__is_inputhook_installed_impl(module); +} + #ifndef OS_TTYNAME_METHODDEF #define OS_TTYNAME_METHODDEF #endif /* 
!defined(OS_TTYNAME_METHODDEF) */ @@ -12097,6 +12248,14 @@ os__supports_virtual_terminal(PyObject *module, PyObject *Py_UNUSED(ignored)) #define OS__PATH_SPLITROOT_METHODDEF #endif /* !defined(OS__PATH_SPLITROOT_METHODDEF) */ +#ifndef OS__PATH_EXISTS_METHODDEF + #define OS__PATH_EXISTS_METHODDEF +#endif /* !defined(OS__PATH_EXISTS_METHODDEF) */ + +#ifndef OS__PATH_LEXISTS_METHODDEF + #define OS__PATH_LEXISTS_METHODDEF +#endif /* !defined(OS__PATH_LEXISTS_METHODDEF) */ + #ifndef OS__PATH_ISDIR_METHODDEF #define OS__PATH_ISDIR_METHODDEF #endif /* !defined(OS__PATH_ISDIR_METHODDEF) */ @@ -12105,14 +12264,14 @@ os__supports_virtual_terminal(PyObject *module, PyObject *Py_UNUSED(ignored)) #define OS__PATH_ISFILE_METHODDEF #endif /* !defined(OS__PATH_ISFILE_METHODDEF) */ -#ifndef OS__PATH_EXISTS_METHODDEF - #define OS__PATH_EXISTS_METHODDEF -#endif /* !defined(OS__PATH_EXISTS_METHODDEF) */ - #ifndef OS__PATH_ISLINK_METHODDEF #define OS__PATH_ISLINK_METHODDEF #endif /* !defined(OS__PATH_ISLINK_METHODDEF) */ +#ifndef OS__PATH_ISJUNCTION_METHODDEF + #define OS__PATH_ISJUNCTION_METHODDEF +#endif /* !defined(OS__PATH_ISJUNCTION_METHODDEF) */ + #ifndef OS_NICE_METHODDEF #define OS_NICE_METHODDEF #endif /* !defined(OS_NICE_METHODDEF) */ @@ -12660,4 +12819,4 @@ os__supports_virtual_terminal(PyObject *module, PyObject *Py_UNUSED(ignored)) #ifndef OS__SUPPORTS_VIRTUAL_TERMINAL_METHODDEF #define OS__SUPPORTS_VIRTUAL_TERMINAL_METHODDEF #endif /* !defined(OS__SUPPORTS_VIRTUAL_TERMINAL_METHODDEF) */ -/*[clinic end generated code: output=c4698b47007cd6eb input=a9049054013a1b77]*/ +/*[clinic end generated code: output=faaa5e5ffb7b165d input=a9049054013a1b77]*/ diff --git a/Modules/clinic/pyexpat.c.h b/Modules/clinic/pyexpat.c.h index 343cb91b975038..03b74ba57155a3 100644 --- a/Modules/clinic/pyexpat.c.h +++ b/Modules/clinic/pyexpat.c.h @@ -4,7 +4,7 @@ preserve #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) # include "pycore_gc.h" // PyGC_Head -# include "pycore_runtime.h" // _Py_ID() +# include "pycore_runtime.h" // _Py_SINGLETON() #endif #include "pycore_modsupport.h" // _PyArg_UnpackKeywords() @@ -545,4 +545,4 @@ pyexpat_ErrorString(PyObject *module, PyObject *arg) #ifndef PYEXPAT_XMLPARSER_USEFOREIGNDTD_METHODDEF #define PYEXPAT_XMLPARSER_USEFOREIGNDTD_METHODDEF #endif /* !defined(PYEXPAT_XMLPARSER_USEFOREIGNDTD_METHODDEF) */ -/*[clinic end generated code: output=892e48e41f9b6e4b input=a9049054013a1b77]*/ +/*[clinic end generated code: output=729eafd7abf72a57 input=a9049054013a1b77]*/ diff --git a/Modules/itertoolsmodule.c b/Modules/itertoolsmodule.c index 8641c2f87e6db2..0d6ff20489aa2c 100644 --- a/Modules/itertoolsmodule.c +++ b/Modules/itertoolsmodule.c @@ -1,13 +1,14 @@ #include "Python.h" -#include "pycore_call.h" // _PyObject_CallNoArgs() -#include "pycore_ceval.h" // _PyEval_GetBuiltin() -#include "pycore_long.h" // _PyLong_GetZero() -#include "pycore_moduleobject.h" // _PyModule_GetState() -#include "pycore_typeobject.h" // _PyType_GetModuleState() -#include "pycore_object.h" // _PyObject_GC_TRACK() -#include "pycore_tuple.h" // _PyTuple_ITEMS() +#include "pycore_call.h" // _PyObject_CallNoArgs() +#include "pycore_ceval.h" // _PyEval_GetBuiltin() +#include "pycore_critical_section.h" // Py_BEGIN_CRITICAL_SECTION() +#include "pycore_long.h" // _PyLong_GetZero() +#include "pycore_moduleobject.h" // _PyModule_GetState() +#include "pycore_typeobject.h" // _PyType_GetModuleState() +#include "pycore_object.h" // _PyObject_GC_TRACK() +#include "pycore_tuple.h" // _PyTuple_ITEMS() -#include // 
offsetof() +#include // offsetof() /* Itertools module written and maintained by Raymond D. Hettinger @@ -4037,7 +4038,7 @@ fast_mode: when cnt an integer < PY_SSIZE_T_MAX and no step is specified. assert(cnt != PY_SSIZE_T_MAX && long_cnt == NULL && long_step==PyLong(1)); Advances with: cnt += 1 - When count hits Y_SSIZE_T_MAX, switch to slow_mode. + When count hits PY_SSIZE_T_MAX, switch to slow_mode. slow_mode: when cnt == PY_SSIZE_T_MAX, step is not int(1), or cnt is a float. @@ -4186,9 +4187,30 @@ count_nextlong(countobject *lz) static PyObject * count_next(countobject *lz) { +#ifndef Py_GIL_DISABLED if (lz->cnt == PY_SSIZE_T_MAX) return count_nextlong(lz); return PyLong_FromSsize_t(lz->cnt++); +#else + // free-threading version + // fast mode uses compare-exchange loop + // slow mode uses a critical section + PyObject *returned; + Py_ssize_t cnt; + + cnt = _Py_atomic_load_ssize_relaxed(&lz->cnt); + for (;;) { + if (cnt == PY_SSIZE_T_MAX) { + Py_BEGIN_CRITICAL_SECTION(lz); + returned = count_nextlong(lz); + Py_END_CRITICAL_SECTION(); + return returned; + } + if (_Py_atomic_compare_exchange_ssize(&lz->cnt, &cnt, cnt + 1)) { + return PyLong_FromSsize_t(cnt); + } + } +#endif } static PyObject * diff --git a/Modules/mmapmodule.c b/Modules/mmapmodule.c index dfc16ff4370349..99a85e9e49ad47 100644 --- a/Modules/mmapmodule.c +++ b/Modules/mmapmodule.c @@ -41,6 +41,7 @@ #ifdef MS_WINDOWS #include +#include // LsaNtStatusToWinError static int my_getpagesize(void) { @@ -255,6 +256,208 @@ do { \ } while (0) #endif /* UNIX */ +#if defined(MS_WINDOWS) && !defined(DONT_USE_SEH) +static DWORD +filter_page_exception(EXCEPTION_POINTERS *ptrs, EXCEPTION_RECORD *record) +{ + *record = *ptrs->ExceptionRecord; + if (record->ExceptionCode == EXCEPTION_IN_PAGE_ERROR || + record->ExceptionCode == EXCEPTION_ACCESS_VIOLATION) + { + return EXCEPTION_EXECUTE_HANDLER; + } + return EXCEPTION_CONTINUE_SEARCH; +} + +static DWORD +filter_page_exception_method(mmap_object *self, EXCEPTION_POINTERS *ptrs, + EXCEPTION_RECORD *record) +{ + *record = *ptrs->ExceptionRecord; + if (record->ExceptionCode == EXCEPTION_IN_PAGE_ERROR || + record->ExceptionCode == EXCEPTION_ACCESS_VIOLATION) + { + + ULONG_PTR address = record->ExceptionInformation[1]; + if (address >= (ULONG_PTR) self->data && + address < (ULONG_PTR) self->data + (ULONG_PTR) self->size) + { + return EXCEPTION_EXECUTE_HANDLER; + } + } + return EXCEPTION_CONTINUE_SEARCH; +} +#endif + +#if defined(MS_WINDOWS) && !defined(DONT_USE_SEH) +#define HANDLE_INVALID_MEM(sourcecode) \ +do { \ + EXCEPTION_RECORD record; \ + __try { \ + sourcecode \ + } \ + __except (filter_page_exception(GetExceptionInformation(), &record)) { \ + assert(record.ExceptionCode == EXCEPTION_IN_PAGE_ERROR || \ + record.ExceptionCode == EXCEPTION_ACCESS_VIOLATION); \ + if (record.ExceptionCode == EXCEPTION_IN_PAGE_ERROR) { \ + NTSTATUS status = (NTSTATUS) record.ExceptionInformation[2]; \ + ULONG code = LsaNtStatusToWinError(status); \ + PyErr_SetFromWindowsErr(code); \ + } \ + else if (record.ExceptionCode == EXCEPTION_ACCESS_VIOLATION) { \ + PyErr_SetFromWindowsErr(ERROR_NOACCESS); \ + } \ + return -1; \ + } \ +} while (0) +#else +#define HANDLE_INVALID_MEM(sourcecode) \ +do { \ + sourcecode \ +} while (0) +#endif + +#if defined(MS_WINDOWS) && !defined(DONT_USE_SEH) +#define HANDLE_INVALID_MEM_METHOD(self, sourcecode) \ +do { \ + EXCEPTION_RECORD record; \ + __try { \ + sourcecode \ + } \ + __except (filter_page_exception_method(self, GetExceptionInformation(), \ + &record)) { \ + 
assert(record.ExceptionCode == EXCEPTION_IN_PAGE_ERROR || \ + record.ExceptionCode == EXCEPTION_ACCESS_VIOLATION); \ + if (record.ExceptionCode == EXCEPTION_IN_PAGE_ERROR) { \ + NTSTATUS status = (NTSTATUS) record.ExceptionInformation[2]; \ + ULONG code = LsaNtStatusToWinError(status); \ + PyErr_SetFromWindowsErr(code); \ + } \ + else if (record.ExceptionCode == EXCEPTION_ACCESS_VIOLATION) { \ + PyErr_SetFromWindowsErr(ERROR_NOACCESS); \ + } \ + return -1; \ + } \ +} while (0) +#else +#define HANDLE_INVALID_MEM_METHOD(self, sourcecode) \ +do { \ + sourcecode \ +} while (0) +#endif + +int +safe_memcpy(void *dest, const void *src, size_t count) +{ + HANDLE_INVALID_MEM( + memcpy(dest, src, count); + ); + return 0; +} + +int +safe_byte_copy(char *dest, const char *src) +{ + HANDLE_INVALID_MEM( + *dest = *src; + ); + return 0; +} + +int +safe_memchr(char **out, const void *ptr, int ch, size_t count) +{ + HANDLE_INVALID_MEM( + *out = (char *) memchr(ptr, ch, count); + ); + return 0; +} + +int +safe_memmove(void *dest, const void *src, size_t count) +{ + HANDLE_INVALID_MEM( + memmove(dest, src, count); + ); + return 0; +} + +int +safe_copy_from_slice(char *dest, const char *src, Py_ssize_t start, + Py_ssize_t step, Py_ssize_t slicelen) +{ + HANDLE_INVALID_MEM( + size_t cur; + Py_ssize_t i; + for (cur = start, i = 0; i < slicelen; cur += step, i++) { + dest[cur] = src[i]; + } + ); + return 0; +} + +int +safe_copy_to_slice(char *dest, const char *src, Py_ssize_t start, + Py_ssize_t step, Py_ssize_t slicelen) +{ + HANDLE_INVALID_MEM( + size_t cur; + Py_ssize_t i; + for (cur = start, i = 0; i < slicelen; cur += step, i++) { + dest[i] = src[cur]; + } + ); + return 0; +} + + +int +_safe_PyBytes_Find(Py_ssize_t *out, mmap_object *self, const char *haystack, + Py_ssize_t len_haystack, const char *needle, + Py_ssize_t len_needle, Py_ssize_t offset) +{ + HANDLE_INVALID_MEM_METHOD(self, + *out = _PyBytes_Find(haystack, len_haystack, needle, len_needle, offset); + ); + return 0; +} + +int +_safe_PyBytes_ReverseFind(Py_ssize_t *out, mmap_object *self, + const char *haystack, Py_ssize_t len_haystack, + const char *needle, Py_ssize_t len_needle, + Py_ssize_t offset) +{ + HANDLE_INVALID_MEM_METHOD(self, + *out = _PyBytes_ReverseFind(haystack, len_haystack, needle, len_needle, + offset); + ); + return 0; +} + +PyObject * +_safe_PyBytes_FromStringAndSize(char *start, size_t num_bytes) { + if (num_bytes == 1) { + char dest; + if (safe_byte_copy(&dest, start) < 0) { + return NULL; + } + else { + return PyBytes_FromStringAndSize(&dest, 1); + } + } + else { + PyObject *result = PyBytes_FromStringAndSize(NULL, num_bytes); + if (result == NULL) { + return NULL; + } + if (safe_memcpy(PyBytes_AS_STRING(result), start, num_bytes) < 0) { + Py_CLEAR(result); + } + return result; + } +} + static PyObject * mmap_read_byte_method(mmap_object *self, PyObject *Py_UNUSED(ignored)) @@ -264,7 +467,12 @@ mmap_read_byte_method(mmap_object *self, PyErr_SetString(PyExc_ValueError, "read byte out of range"); return NULL; } - return PyLong_FromLong((unsigned char)self->data[self->pos++]); + char dest; + if (safe_byte_copy(&dest, self->data + self->pos) < 0) { + return NULL; + } + self->pos++; + return PyLong_FromLong((unsigned char) dest); } static PyObject * @@ -273,7 +481,6 @@ mmap_read_line_method(mmap_object *self, { Py_ssize_t remaining; char *start, *eol; - PyObject *result; CHECK_VALID(NULL); @@ -281,13 +488,20 @@ mmap_read_line_method(mmap_object *self, if (!remaining) return PyBytes_FromString(""); start = self->data + self->pos; 
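/*
 * From here on the mmap methods route their raw memory accesses through the
 * SEH-guarded helpers defined above (safe_memcpy, safe_memchr,
 * _safe_PyBytes_FromStringAndSize, ...), so that touching a page that is no
 * longer backed by the mapped file raises an OSError instead of crashing the
 * interpreter.  A minimal sketch of the pattern those helpers rely on,
 * assuming MSVC structured exception handling and <windows.h>/<string.h>;
 * the function name here is illustrative, not part of the patch:
 *
 *     static int
 *     guarded_memcpy(void *dst, const void *src, size_t n)
 *     {
 *         __try {
 *             memcpy(dst, src, n);
 *         }
 *         __except (GetExceptionCode() == EXCEPTION_IN_PAGE_ERROR ||
 *                   GetExceptionCode() == EXCEPTION_ACCESS_VIOLATION
 *                   ? EXCEPTION_EXECUTE_HANDLER : EXCEPTION_CONTINUE_SEARCH) {
 *             return -1;   // the real helpers also set a Windows error here
 *         }
 *         return 0;
 *     }
 */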
- eol = memchr(start, '\n', remaining); + + if (safe_memchr(&eol, start, '\n', remaining) < 0) { + return NULL; + } + if (!eol) eol = self->data + self->size; else ++eol; /* advance past newline */ - result = PyBytes_FromStringAndSize(start, (eol - start)); - self->pos += (eol - start); + + PyObject *result = _safe_PyBytes_FromStringAndSize(start, eol - start); + if (result != NULL) { + self->pos += (eol - start); + } return result; } @@ -296,7 +510,6 @@ mmap_read_method(mmap_object *self, PyObject *args) { Py_ssize_t num_bytes = PY_SSIZE_T_MAX, remaining; - PyObject *result; CHECK_VALID(NULL); if (!PyArg_ParseTuple(args, "|O&:read", _Py_convert_optional_to_ssize_t, &num_bytes)) @@ -307,8 +520,12 @@ mmap_read_method(mmap_object *self, remaining = (self->pos < self->size) ? self->size - self->pos : 0; if (num_bytes < 0 || num_bytes > remaining) num_bytes = remaining; - result = PyBytes_FromStringAndSize(&self->data[self->pos], num_bytes); - self->pos += num_bytes; + + PyObject *result = _safe_PyBytes_FromStringAndSize(self->data + self->pos, + num_bytes); + if (result != NULL) { + self->pos += num_bytes; + } return result; } @@ -341,25 +558,38 @@ mmap_gfind(mmap_object *self, else if (end > self->size) end = self->size; - Py_ssize_t res; + Py_ssize_t index; + PyObject *result; CHECK_VALID_OR_RELEASE(NULL, view); if (end < start) { - res = -1; + result = PyLong_FromSsize_t(-1); } else if (reverse) { assert(0 <= start && start <= end && end <= self->size); - res = _PyBytes_ReverseFind( + if (_safe_PyBytes_ReverseFind(&index, self, self->data + start, end - start, - view.buf, view.len, start); + view.buf, view.len, start) < 0) + { + result = NULL; + } + else { + result = PyLong_FromSsize_t(index); + } } else { assert(0 <= start && start <= end && end <= self->size); - res = _PyBytes_Find( + if (_safe_PyBytes_Find(&index, self, self->data + start, end - start, - view.buf, view.len, start); + view.buf, view.len, start) < 0) + { + result = NULL; + } + else { + result = PyLong_FromSsize_t(index); + } } PyBuffer_Release(&view); - return PyLong_FromSsize_t(res); + return result; } } @@ -432,10 +662,16 @@ mmap_write_method(mmap_object *self, } CHECK_VALID_OR_RELEASE(NULL, data); - memcpy(&self->data[self->pos], data.buf, data.len); - self->pos += data.len; + PyObject *result; + if (safe_memcpy(self->data + self->pos, data.buf, data.len) < 0) { + result = NULL; + } + else { + self->pos += data.len; + result = PyLong_FromSsize_t(data.len); + } PyBuffer_Release(&data); - return PyLong_FromSsize_t(data.len); + return result; } static PyObject * @@ -452,14 +688,16 @@ mmap_write_byte_method(mmap_object *self, return NULL; CHECK_VALID(NULL); - if (self->pos < self->size) { - self->data[self->pos++] = value; - Py_RETURN_NONE; - } - else { + if (self->pos >= self->size) { PyErr_SetString(PyExc_ValueError, "write byte out of range"); return NULL; } + + if (safe_byte_copy(self->data + self->pos, &value) < 0) { + return NULL; + } + self->pos++; + Py_RETURN_NONE; } static PyObject * @@ -763,8 +1001,9 @@ mmap_move_method(mmap_object *self, PyObject *args) goto bounds; CHECK_VALID(NULL); - memmove(&self->data[dest], &self->data[src], cnt); - + if (safe_memmove(self->data + dest, self->data + src, cnt) < 0) { + return NULL; + }; Py_RETURN_NONE; bounds: @@ -855,6 +1094,29 @@ mmap__sizeof__method(mmap_object *self, void *Py_UNUSED(ignored)) } #endif +#if defined(MS_WINDOWS) && defined(Py_DEBUG) +static PyObject * +mmap_protect_method(mmap_object *self, PyObject *args) { + DWORD flNewProtect, flOldProtect; + Py_ssize_t 
start, length; + + CHECK_VALID(NULL); + + if (!PyArg_ParseTuple(args, "Inn:protect", &flNewProtect, &start, &length)) { + return NULL; + } + + if (!VirtualProtect((void *) (self->data + start), length, flNewProtect, + &flOldProtect)) + { + PyErr_SetFromWindowsErr(GetLastError()); + return NULL; + } + + Py_RETURN_NONE; +} +#endif + #ifdef HAVE_MADVISE static PyObject * mmap_madvise_method(mmap_object *self, PyObject *args) @@ -924,7 +1186,10 @@ static struct PyMethodDef mmap_object_methods[] = { {"__exit__", (PyCFunction) mmap__exit__method, METH_VARARGS}, #ifdef MS_WINDOWS {"__sizeof__", (PyCFunction) mmap__sizeof__method, METH_NOARGS}, -#endif +#ifdef Py_DEBUG + {"_protect", (PyCFunction) mmap_protect_method, METH_VARARGS}, +#endif // Py_DEBUG +#endif // MS_WINDOWS {NULL, NULL} /* sentinel */ }; @@ -968,7 +1233,12 @@ mmap_item(mmap_object *self, Py_ssize_t i) PyErr_SetString(PyExc_IndexError, "mmap index out of range"); return NULL; } - return PyBytes_FromStringAndSize(self->data + i, 1); + + char dest; + if (safe_byte_copy(&dest, self->data + i) < 0) { + return NULL; + } + return PyBytes_FromStringAndSize(&dest, 1); } static PyObject * @@ -987,7 +1257,12 @@ mmap_subscript(mmap_object *self, PyObject *item) return NULL; } CHECK_VALID(NULL); - return PyLong_FromLong(Py_CHARMASK(self->data[i])); + + char dest; + if (safe_byte_copy(&dest, self->data + i) < 0) { + return NULL; + } + return PyLong_FromLong(Py_CHARMASK(dest)); } else if (PySlice_Check(item)) { Py_ssize_t start, stop, step, slicelen; @@ -1001,23 +1276,22 @@ mmap_subscript(mmap_object *self, PyObject *item) if (slicelen <= 0) return PyBytes_FromStringAndSize("", 0); else if (step == 1) - return PyBytes_FromStringAndSize(self->data + start, - slicelen); + return _safe_PyBytes_FromStringAndSize(self->data + start, slicelen); else { char *result_buf = (char *)PyMem_Malloc(slicelen); - size_t cur; - Py_ssize_t i; PyObject *result; if (result_buf == NULL) return PyErr_NoMemory(); - for (cur = start, i = 0; i < slicelen; - cur += step, i++) { - result_buf[i] = self->data[cur]; + if (safe_copy_to_slice(result_buf, self->data, start, step, + slicelen) < 0) + { + result = NULL; + } + else { + result = PyBytes_FromStringAndSize(result_buf, slicelen); } - result = PyBytes_FromStringAndSize(result_buf, - slicelen); PyMem_Free(result_buf); return result; } @@ -1052,7 +1326,10 @@ mmap_ass_item(mmap_object *self, Py_ssize_t i, PyObject *v) if (!is_writable(self)) return -1; buf = PyBytes_AsString(v); - self->data[i] = buf[0]; + + if (safe_byte_copy(self->data + i, buf) < 0) { + return -1; + } return 0; } @@ -1097,7 +1374,11 @@ mmap_ass_subscript(mmap_object *self, PyObject *item, PyObject *value) return -1; } CHECK_VALID(-1); - self->data[i] = (char) v; + + char v_char = (char) v; + if (safe_byte_copy(self->data + i, &v_char) < 0) { + return -1; + } return 0; } else if (PySlice_Check(item)) { @@ -1123,24 +1404,23 @@ mmap_ass_subscript(mmap_object *self, PyObject *item, PyObject *value) } CHECK_VALID_OR_RELEASE(-1, vbuf); + int result = 0; if (slicelen == 0) { } else if (step == 1) { - memcpy(self->data + start, vbuf.buf, slicelen); + if (safe_memcpy(self->data + start, vbuf.buf, slicelen) < 0) { + result = -1; + } } else { - size_t cur; - Py_ssize_t i; - - for (cur = start, i = 0; - i < slicelen; - cur += step, i++) + if (safe_copy_from_slice(self->data, (char *)vbuf.buf, start, step, + slicelen) < 0) { - self->data[cur] = ((char *)vbuf.buf)[i]; + result = -1; } } PyBuffer_Release(&vbuf); - return 0; + return result; } else { 
PyErr_SetString(PyExc_TypeError, diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index 9f4be98b35186e..5f943d4b1c8085 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -16,8 +16,8 @@ #include "pycore_call.h" // _PyObject_CallNoArgs() #include "pycore_ceval.h" // _PyEval_ReInitThreads() #include "pycore_fileutils.h" // _Py_closerange() -#include "pycore_import.h" // _PyImport_ReInitLock() #include "pycore_initconfig.h" // _PyStatus_EXCEPTION() +#include "pycore_long.h" // _PyLong_IsNegative() #include "pycore_moduleobject.h" // _PyModule_GetState() #include "pycore_object.h" // _PyObject_LookupSpecial() #include "pycore_pylifecycle.h" // _PyOS_URandom() @@ -626,10 +626,7 @@ PyOS_AfterFork_Parent(void) _PyEval_StartTheWorldAll(&_PyRuntime); PyInterpreterState *interp = _PyInterpreterState_GET(); - if (_PyImport_ReleaseLock(interp) <= 0) { - Py_FatalError("failed releasing import lock after fork"); - } - + _PyImport_ReleaseLock(interp); run_at_forkers(interp->after_forkers_parent, 0); } @@ -674,10 +671,7 @@ PyOS_AfterFork_Child(void) _PyEval_StartTheWorldAll(&_PyRuntime); _PyThreadState_DeleteList(list); - status = _PyImport_ReInitLock(tstate->interp); - if (_PyStatus_EXCEPTION(status)) { - goto fatal_error; - } + _PyImport_ReleaseLock(tstate->interp); _PySignal_AfterFork(); @@ -967,16 +961,46 @@ _Py_Gid_Converter(PyObject *obj, gid_t *p) #endif /* MS_WINDOWS */ -#define _PyLong_FromDev PyLong_FromLongLong +static PyObject * +_PyLong_FromDev(dev_t dev) +{ +#ifdef NODEV + if (dev == NODEV) { + return PyLong_FromLongLong((long long)dev); + } +#endif + return PyLong_FromUnsignedLongLong((unsigned long long)dev); +} #if (defined(HAVE_MKNOD) && defined(HAVE_MAKEDEV)) || defined(HAVE_DEVICE_MACROS) static int _Py_Dev_Converter(PyObject *obj, void *p) { - *((dev_t *)p) = PyLong_AsUnsignedLongLong(obj); - if (PyErr_Occurred()) +#ifdef NODEV + if (PyLong_Check(obj) && _PyLong_IsNegative((PyLongObject *)obj)) { + int overflow; + long long result = PyLong_AsLongLongAndOverflow(obj, &overflow); + if (result == -1 && PyErr_Occurred()) { + return 0; + } + if (!overflow && result == (long long)NODEV) { + *((dev_t *)p) = NODEV; + return 1; + } + } +#endif + + unsigned long long result = PyLong_AsUnsignedLongLong(obj); + if (result == (unsigned long long)-1 && PyErr_Occurred()) { return 0; + } + if ((unsigned long long)(dev_t)result != result) { + PyErr_SetString(PyExc_OverflowError, + "Python int too large to convert to C dev_t"); + return 0; + } + *((dev_t *)p) = (dev_t)result; return 1; } #endif /* (HAVE_MKNOD && HAVE_MAKEDEV) || HAVE_DEVICE_MACROS */ @@ -1092,16 +1116,15 @@ get_posix_state(PyObject *module) * * path_converter accepts (Unicode) strings and their * subclasses, and bytes and their subclasses. What - * it does with the argument depends on the platform: + * it does with the argument depends on path.make_wide: * - * * On Windows, if we get a (Unicode) string we - * extract the wchar_t * and return it; if we get - * bytes we decode to wchar_t * and return that. + * * If path.make_wide is nonzero, if we get a (Unicode) + * string we extract the wchar_t * and return it; if we + * get bytes we decode to wchar_t * and return that. * - * * On all other platforms, strings are encoded - * to bytes using PyUnicode_FSConverter, then we - * extract the char * from the bytes object and - * return that. + * * If path.make_wide is zero, if we get bytes we extract + * the char_t * and return it; if we get a (Unicode) + * string we encode to char_t * and return that. 
* * path_converter also optionally accepts signed * integers (representing open file descriptors) instead @@ -1110,6 +1133,15 @@ get_posix_state(PyObject *module) * Input fields: * path.nullable * If nonzero, the path is permitted to be None. + * path.nonstrict + * If nonzero, the path is permitted to contain + * embedded null characters and have any length. + * path.make_wide + * If nonzero, the converter always uses wide, decoding if necessary, else + * it always uses narrow, encoding if necessary. The default value is + * nonzero on Windows, else zero. + * path.suppress_value_error + * If nonzero, raising ValueError is suppressed. * path.allow_fd * If nonzero, the path is permitted to be a file handle * (a signed int) instead of a string. @@ -1125,12 +1157,10 @@ get_posix_state(PyObject *module) * Output fields: * path.wide * Points to the path if it was expressed as Unicode - * and was not encoded. (Only used on Windows.) + * or if it was bytes and decoded to Unicode. * path.narrow * Points to the path if it was expressed as bytes, - * or it was Unicode and was encoded to bytes. (On Windows, - * is a non-zero integer if the path was expressed as bytes. - * The type is deliberately incompatible to prevent misuse.) + * or if it was Unicode and encoded to bytes. * path.fd * Contains a file descriptor if path.accept_fd was true * and the caller provided a signed integer instead of any @@ -1140,6 +1170,9 @@ get_posix_state(PyObject *module) * unspecified, path_converter will never get called. * So if you set allow_fd, you *MUST* initialize path.fd = -1 * yourself! + * path.value_error + * If nonzero, then suppress_value_error was specified and a ValueError + * occurred. * path.length * The length of the path in characters, if specified as * a string. @@ -1172,28 +1205,38 @@ get_posix_state(PyObject *module) * path_cleanup(). However it is safe to do so.) 
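 *
 * A sketch of typical use by a hand-written caller (the flag values and the
 * path_arg object below are illustrative; Argument Clinic generates the real
 * initializers for most callers):
 *
 *   path_t path = PATH_T_INITIALIZE_P("stat", "path", 0, 0, 0, 1);
 *   if (!path_converter(path_arg, &path)) {
 *       return NULL;
 *   }
 *   if (path.fd != -1) {
 *       // caller passed an open file descriptor (allow_fd was set)
 *   }
 *   else if (path.wide) {
 *       // wide (wchar_t *) form; always taken when make_wide is nonzero
 *   }
 *   else {
 *       // narrow (char *) form
 *   }
 *   path_cleanup(&path);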
*/ typedef struct { + // Input fields const char *function_name; const char *argument_name; int nullable; + int nonstrict; + int make_wide; + int suppress_value_error; int allow_fd; + // Output fields const wchar_t *wide; -#ifdef MS_WINDOWS - BOOL narrow; -#else const char *narrow; -#endif int fd; + int value_error; Py_ssize_t length; PyObject *object; PyObject *cleanup; } path_t; +#define PATH_T_INITIALIZE(function_name, argument_name, nullable, nonstrict, \ + make_wide, suppress_value_error, allow_fd) \ + {function_name, argument_name, nullable, nonstrict, make_wide, \ + suppress_value_error, allow_fd, NULL, NULL, -1, 0, 0, NULL, NULL} #ifdef MS_WINDOWS -#define PATH_T_INITIALIZE(function_name, argument_name, nullable, allow_fd) \ - {function_name, argument_name, nullable, allow_fd, NULL, FALSE, -1, 0, NULL, NULL} +#define PATH_T_INITIALIZE_P(function_name, argument_name, nullable, \ + nonstrict, suppress_value_error, allow_fd) \ + PATH_T_INITIALIZE(function_name, argument_name, nullable, nonstrict, 1, \ + suppress_value_error, allow_fd) #else -#define PATH_T_INITIALIZE(function_name, argument_name, nullable, allow_fd) \ - {function_name, argument_name, nullable, allow_fd, NULL, NULL, -1, 0, NULL, NULL} +#define PATH_T_INITIALIZE_P(function_name, argument_name, nullable, \ + nonstrict, suppress_value_error, allow_fd) \ + PATH_T_INITIALIZE(function_name, argument_name, nullable, nonstrict, 0, \ + suppress_value_error, allow_fd) #endif static void @@ -1214,10 +1257,8 @@ path_converter(PyObject *o, void *p) Py_ssize_t length = 0; int is_index, is_bytes, is_unicode; const char *narrow; -#ifdef MS_WINDOWS PyObject *wo = NULL; wchar_t *wide = NULL; -#endif #define FORMAT_EXCEPTION(exc, fmt) \ PyErr_Format(exc, "%s%s" fmt, \ @@ -1238,11 +1279,7 @@ path_converter(PyObject *o, void *p) if ((o == Py_None) && path->nullable) { path->wide = NULL; -#ifdef MS_WINDOWS - path->narrow = FALSE; -#else path->narrow = NULL; -#endif path->fd = -1; goto success_exit; } @@ -1286,30 +1323,33 @@ path_converter(PyObject *o, void *p) } if (is_unicode) { + if (path->make_wide) { + wide = PyUnicode_AsWideCharString(o, &length); + if (!wide) { + goto error_exit; + } #ifdef MS_WINDOWS - wide = PyUnicode_AsWideCharString(o, &length); - if (!wide) { - goto error_exit; - } - if (length > 32767) { - FORMAT_EXCEPTION(PyExc_ValueError, "%s too long for Windows"); - goto error_exit; - } - if (wcslen(wide) != length) { - FORMAT_EXCEPTION(PyExc_ValueError, "embedded null character in %s"); - goto error_exit; - } + if (!path->nonstrict && length > 32767) { + FORMAT_EXCEPTION(PyExc_ValueError, "%s too long for Windows"); + goto error_exit; + } +#endif + if (!path->nonstrict && wcslen(wide) != (size_t)length) { + FORMAT_EXCEPTION(PyExc_ValueError, + "embedded null character in %s"); + goto error_exit; + } - path->wide = wide; - path->narrow = FALSE; - path->fd = -1; - wide = NULL; - goto success_exit; -#else - if (!PyUnicode_FSConverter(o, &bytes)) { + path->wide = wide; + path->narrow = NULL; + path->fd = -1; + wide = NULL; + goto success_exit; + } + bytes = PyUnicode_EncodeFSDefault(o); + if (!bytes) { goto error_exit; } -#endif } else if (is_bytes) { bytes = Py_NewRef(o); @@ -1319,11 +1359,7 @@ path_converter(PyObject *o, void *p) goto error_exit; } path->wide = NULL; -#ifdef MS_WINDOWS - path->narrow = FALSE; -#else path->narrow = NULL; -#endif goto success_exit; } else { @@ -1343,52 +1379,54 @@ path_converter(PyObject *o, void *p) length = PyBytes_GET_SIZE(bytes); narrow = PyBytes_AS_STRING(bytes); - if ((size_t)length != 
strlen(narrow)) { + if (!path->nonstrict && strlen(narrow) != (size_t)length) { FORMAT_EXCEPTION(PyExc_ValueError, "embedded null character in %s"); goto error_exit; } -#ifdef MS_WINDOWS - wo = PyUnicode_DecodeFSDefaultAndSize( - narrow, - length - ); - if (!wo) { - goto error_exit; - } + if (path->make_wide) { + wo = PyUnicode_DecodeFSDefaultAndSize(narrow, length); + if (!wo) { + goto error_exit; + } - wide = PyUnicode_AsWideCharString(wo, &length); - Py_DECREF(wo); - if (!wide) { - goto error_exit; - } - if (length > 32767) { - FORMAT_EXCEPTION(PyExc_ValueError, "%s too long for Windows"); - goto error_exit; - } - if (wcslen(wide) != length) { - FORMAT_EXCEPTION(PyExc_ValueError, "embedded null character in %s"); - goto error_exit; - } - path->wide = wide; - path->narrow = TRUE; - Py_DECREF(bytes); - wide = NULL; -#else - path->wide = NULL; - path->narrow = narrow; - if (bytes == o) { - /* Still a reference owned by path->object, don't have to - worry about path->narrow is used after free. */ + wide = PyUnicode_AsWideCharString(wo, &length); + Py_DECREF(wo); + if (!wide) { + goto error_exit; + } +#ifdef MS_WINDOWS + if (!path->nonstrict && length > 32767) { + FORMAT_EXCEPTION(PyExc_ValueError, "%s too long for Windows"); + goto error_exit; + } +#endif + if (!path->nonstrict && wcslen(wide) != (size_t)length) { + FORMAT_EXCEPTION(PyExc_ValueError, + "embedded null character in %s"); + goto error_exit; + } + path->wide = wide; + path->narrow = NULL; Py_DECREF(bytes); + wide = NULL; } else { - path->cleanup = bytes; + path->wide = NULL; + path->narrow = narrow; + if (bytes == o) { + /* Still a reference owned by path->object, don't have to + worry about path->narrow is used after free. */ + Py_DECREF(bytes); + } + else { + path->cleanup = bytes; + } } -#endif path->fd = -1; success_exit: + path->value_error = 0; path->length = length; path->object = o; return Py_CLEANUP_SUPPORTED; @@ -1396,10 +1434,20 @@ path_converter(PyObject *o, void *p) error_exit: Py_XDECREF(o); Py_XDECREF(bytes); -#ifdef MS_WINDOWS PyMem_Free(wide); -#endif - return 0; + if (!path->suppress_value_error || + !PyErr_ExceptionMatches(PyExc_ValueError)) + { + return 0; + } + PyErr_Clear(); + path->wide = NULL; + path->narrow = NULL; + path->fd = -1; + path->value_error = 1; + path->length = 0; + path->object = NULL; + return Py_CLEANUP_SUPPORTED; } static void @@ -1449,11 +1497,7 @@ follow_symlinks_specified(const char *function_name, int follow_symlinks) static int path_and_dir_fd_invalid(const char *function_name, path_t *path, int dir_fd) { - if (!path->wide && (dir_fd != DEFAULT_DIR_FD) -#ifndef MS_WINDOWS - && !path->narrow -#endif - ) { + if (!path->wide && (dir_fd != DEFAULT_DIR_FD) && !path->narrow) { PyErr_Format(PyExc_ValueError, "%s: can't specify dir_fd without matching path", function_name); @@ -2913,7 +2957,9 @@ class path_t_converter(CConverter): converter = 'path_converter' - def converter_init(self, *, allow_fd=False, nullable=False): + def converter_init(self, *, allow_fd=False, make_wide=None, + nonstrict=False, nullable=False, + suppress_value_error=False): # right now path_t doesn't support default values. # to support a default value, you'll need to override initialize(). 
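        # New keyword options, mapped onto the C initializer emitted by
        # pre_render() below:
        #   nonstrict            -- permit embedded NUL characters and paths
        #                           of any length
        #   make_wide            -- None (the default) picks the platform
        #                           default via PATH_T_INITIALIZE_P; an
        #                           explicit True/False forces the wide or
        #                           narrow form via PATH_T_INITIALIZE
        #   suppress_value_error -- report a ValueError through
        #                           path.value_error instead of raising
        #   allow_fd             -- also accept an open file descriptor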
if self.default not in (unspecified, None): @@ -2923,6 +2969,9 @@ class path_t_converter(CConverter): raise RuntimeError("Can't specify a c_default to the path_t converter!") self.nullable = nullable + self.nonstrict = nonstrict + self.make_wide = make_wide + self.suppress_value_error = suppress_value_error self.allow_fd = allow_fd def pre_render(self): @@ -2932,11 +2981,24 @@ class path_t_converter(CConverter): return str(int(bool(value))) # add self.py_name here when merging with posixmodule conversion - self.c_default = 'PATH_T_INITIALIZE("{}", "{}", {}, {})'.format( - self.function.name, - self.name, - strify(self.nullable), - strify(self.allow_fd), + if self.make_wide is None: + self.c_default = 'PATH_T_INITIALIZE_P("{}", "{}", {}, {}, {}, {})'.format( + self.function.name, + self.name, + strify(self.nullable), + strify(self.nonstrict), + strify(self.suppress_value_error), + strify(self.allow_fd), + ) + else: + self.c_default = 'PATH_T_INITIALIZE("{}", "{}", {}, {}, {}, {}, {})'.format( + self.function.name, + self.name, + strify(self.nullable), + strify(self.nonstrict), + strify(self.make_wide), + strify(self.suppress_value_error), + strify(self.allow_fd), ) def cleanup(self): @@ -3016,7 +3078,7 @@ class sysconf_confname_converter(path_confname_converter): converter="conv_sysconf_confname" [python start generated code]*/ -/*[python end generated code: output=da39a3ee5e6b4b0d input=3338733161aa7879]*/ +/*[python end generated code: output=da39a3ee5e6b4b0d input=577cb476e5d64960]*/ /*[clinic input] @@ -4285,7 +4347,7 @@ _listdir_windows_no_opendir(path_t *path, PyObject *list) { PyObject *v; HANDLE hFindFile = INVALID_HANDLE_VALUE; - BOOL result; + BOOL result, return_bytes; wchar_t namebuf[MAX_PATH+4]; /* Overallocate for "\*.*" */ /* only claim to have space for MAX_PATH */ Py_ssize_t len = Py_ARRAY_LENGTH(namebuf)-4; @@ -4297,9 +4359,11 @@ _listdir_windows_no_opendir(path_t *path, PyObject *list) if (!path->wide) { /* Default arg: "." 
*/ po_wchars = L"."; len = 1; + return_bytes = 0; } else { po_wchars = path->wide; len = wcslen(path->wide); + return_bytes = PyBytes_Check(path->object); } /* The +5 is so we can append "\\*.*\0" */ wnamebuf = PyMem_New(wchar_t, len + 5); @@ -4334,7 +4398,7 @@ _listdir_windows_no_opendir(path_t *path, PyObject *list) wcscmp(wFileData.cFileName, L"..") != 0) { v = PyUnicode_FromWideChar(wFileData.cFileName, wcslen(wFileData.cFileName)); - if (path->narrow && v) { + if (return_bytes && v) { Py_SETREF(v, PyUnicode_EncodeFSDefault(v)); } if (v == NULL) { @@ -4877,7 +4941,7 @@ os__getfullpathname_impl(PyObject *module, path_t *path) if (str == NULL) { return NULL; } - if (path->narrow) { + if (PyBytes_Check(path->object)) { Py_SETREF(str, PyUnicode_EncodeFSDefault(str)); } return str; @@ -4950,7 +5014,7 @@ os__getfinalpathname_impl(PyObject *module, path_t *path) } result = PyUnicode_FromWideChar(target_path, result_length); - if (result && path->narrow) { + if (result && PyBytes_Check(path->object)) { Py_SETREF(result, PyUnicode_EncodeFSDefault(result)); } @@ -5033,7 +5097,7 @@ os__getvolumepathname_impl(PyObject *module, path_t *path) goto exit; } result = PyUnicode_FromWideChar(mountpath, wcslen(mountpath)); - if (path->narrow) + if (PyBytes_Check(path->object)) Py_SETREF(result, PyUnicode_EncodeFSDefault(result)); exit: @@ -5088,407 +5152,367 @@ os__path_splitroot_impl(PyObject *module, path_t *path) } -/*[clinic input] -os._path_isdir - - s: 'O' +#define PY_IFREG 1 // Regular file +#define PY_IFDIR 2 // Directory +#define PY_IFLNK 4 // Symlink +#define PY_IFMNT 8 // Mount Point (junction) +#define PY_IFLRP 16 // Link Reparse Point (name-surrogate, symlink, junction) +#define PY_IFRRP 32 // Regular Reparse Point -Return true if the pathname refers to an existing directory. 
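/*
 * The helpers that follow (_testInfo, _testFileTypeByHandle,
 * _testFileTypeByName, _testFileType, _testFileExists*) replace the
 * per-function copies of the same attribute-probing logic removed in the
 * hunks below for os._path_isdir, _path_isfile, _path_exists and
 * _path_islink, and also back the new os._path_lexists and
 * os._path_isjunction predicates.  Each check reduces to one classification
 * against the PY_IF* flags above; illustrative calls (the attribute values
 * are chosen for the example, not taken from the patch):
 *
 *     _testInfo(FILE_ATTRIBUTE_DIRECTORY, 0, FALSE, PY_IFDIR)   // true
 *     _testInfo(FILE_ATTRIBUTE_REPARSE_POINT,
 *               IO_REPARSE_TAG_SYMLINK, FALSE, PY_IFLNK)        // true
 *     _testInfo(FILE_ATTRIBUTE_NORMAL, 0, TRUE, PY_IFREG)       // true
 */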
+static inline BOOL +_testInfo(DWORD attributes, DWORD reparseTag, BOOL diskDevice, int testedType) +{ + switch (testedType) { + case PY_IFREG: + return diskDevice && attributes && + !(attributes & FILE_ATTRIBUTE_DIRECTORY); + case PY_IFDIR: + return attributes & FILE_ATTRIBUTE_DIRECTORY; + case PY_IFLNK: + return (attributes & FILE_ATTRIBUTE_REPARSE_POINT) && + reparseTag == IO_REPARSE_TAG_SYMLINK; + case PY_IFMNT: + return (attributes & FILE_ATTRIBUTE_REPARSE_POINT) && + reparseTag == IO_REPARSE_TAG_MOUNT_POINT; + case PY_IFLRP: + return (attributes & FILE_ATTRIBUTE_REPARSE_POINT) && + IsReparseTagNameSurrogate(reparseTag); + case PY_IFRRP: + return (attributes & FILE_ATTRIBUTE_REPARSE_POINT) && + reparseTag && !IsReparseTagNameSurrogate(reparseTag); + } -[clinic start generated code]*/ + return FALSE; +} -static PyObject * -os__path_isdir_impl(PyObject *module, PyObject *s) -/*[clinic end generated code: output=9d87ab3c8b8a4e61 input=c17f7ef21d22d64e]*/ +static BOOL +_testFileTypeByHandle(HANDLE hfile, int testedType, BOOL diskOnly) { - HANDLE hfile; - BOOL close_file = TRUE; + assert(testedType == PY_IFREG || testedType == PY_IFDIR || + testedType == PY_IFLNK || testedType == PY_IFMNT || + testedType == PY_IFLRP || testedType == PY_IFRRP); + + BOOL diskDevice = GetFileType(hfile) == FILE_TYPE_DISK; + if (diskOnly && !diskDevice) { + return FALSE; + } + if (testedType != PY_IFREG && testedType != PY_IFDIR) { + FILE_ATTRIBUTE_TAG_INFO info; + return GetFileInformationByHandleEx(hfile, FileAttributeTagInfo, &info, + sizeof(info)) && + _testInfo(info.FileAttributes, info.ReparseTag, diskDevice, + testedType); + } FILE_BASIC_INFO info; - path_t _path = PATH_T_INITIALIZE("isdir", "s", 0, 1); - int result; - BOOL slow_path = TRUE; - FILE_STAT_BASIC_INFORMATION statInfo; + return GetFileInformationByHandleEx(hfile, FileBasicInfo, &info, + sizeof(info)) && + _testInfo(info.FileAttributes, 0, diskDevice, testedType); +} - if (!path_converter(s, &_path)) { - path_cleanup(&_path); - if (PyErr_ExceptionMatches(PyExc_ValueError)) { - PyErr_Clear(); - Py_RETURN_FALSE; +static BOOL +_testFileTypeByName(LPCWSTR path, int testedType) +{ + assert(testedType == PY_IFREG || testedType == PY_IFDIR || + testedType == PY_IFLNK || testedType == PY_IFMNT || + testedType == PY_IFLRP || testedType == PY_IFRRP); + + FILE_STAT_BASIC_INFORMATION info; + if (_Py_GetFileInformationByName(path, FileStatBasicByNameInfo, &info, + sizeof(info))) + { + BOOL diskDevice = info.DeviceType == FILE_DEVICE_DISK || + info.DeviceType == FILE_DEVICE_VIRTUAL_DISK || + info.DeviceType == FILE_DEVICE_CD_ROM; + BOOL result = _testInfo(info.FileAttributes, info.ReparseTag, + diskDevice, testedType); + if (!result || (testedType != PY_IFREG && testedType != PY_IFDIR) || + !(info.FileAttributes & FILE_ATTRIBUTE_REPARSE_POINT)) + { + return result; } - return NULL; + } + else if (_Py_GetFileInformationByName_ErrorIsTrustworthy( + GetLastError())) + { + return FALSE; } - Py_BEGIN_ALLOW_THREADS - if (_path.wide) { - if (_Py_GetFileInformationByName(_path.wide, FileStatBasicByNameInfo, - &statInfo, sizeof(statInfo))) { - if (!(statInfo.FileAttributes & FILE_ATTRIBUTE_REPARSE_POINT)) { - slow_path = FALSE; - result = statInfo.FileAttributes & FILE_ATTRIBUTE_DIRECTORY; - } else if (!(statInfo.FileAttributes & FILE_ATTRIBUTE_DIRECTORY)) { - slow_path = FALSE; - result = 0; - } - } else if (_Py_GetFileInformationByName_ErrorIsTrustworthy(GetLastError())) { - slow_path = FALSE; - result = 0; - } + DWORD flags = FILE_FLAG_BACKUP_SEMANTICS; + if 
(testedType != PY_IFREG && testedType != PY_IFDIR) { + flags |= FILE_FLAG_OPEN_REPARSE_POINT; } - if (slow_path) { - if (_path.fd != -1) { - hfile = _Py_get_osfhandle_noraise(_path.fd); - close_file = FALSE; + HANDLE hfile = CreateFileW(path, FILE_READ_ATTRIBUTES, 0, NULL, + OPEN_EXISTING, flags, NULL); + if (hfile != INVALID_HANDLE_VALUE) { + BOOL result = _testFileTypeByHandle(hfile, testedType, FALSE); + CloseHandle(hfile); + return result; + } + + switch (GetLastError()) { + case ERROR_ACCESS_DENIED: + case ERROR_SHARING_VIOLATION: + case ERROR_CANT_ACCESS_FILE: + case ERROR_INVALID_PARAMETER: + int rc; + STRUCT_STAT st; + if (testedType == PY_IFREG || testedType == PY_IFDIR) { + rc = STAT(path, &st); } else { - hfile = CreateFileW(_path.wide, FILE_READ_ATTRIBUTES, 0, NULL, - OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL); + // PY_IFRRP is not generally supported in this case, except for + // unhandled reparse points such as IO_REPARSE_TAG_APPEXECLINK. + rc = LSTAT(path, &st); } - if (hfile != INVALID_HANDLE_VALUE) { - if (GetFileInformationByHandleEx(hfile, FileBasicInfo, &info, - sizeof(info))) - { - result = info.FileAttributes & FILE_ATTRIBUTE_DIRECTORY; - } - else { - result = 0; - } - if (close_file) { - CloseHandle(hfile); - } - } - else { - STRUCT_STAT st; - switch (GetLastError()) { - case ERROR_ACCESS_DENIED: - case ERROR_SHARING_VIOLATION: - case ERROR_CANT_ACCESS_FILE: - case ERROR_INVALID_PARAMETER: - if (STAT(_path.wide, &st)) { - result = 0; - } - else { - result = S_ISDIR(st.st_mode); - } - break; - default: - result = 0; - } + if (!rc) { + return _testInfo(st.st_file_attributes, st.st_reparse_tag, + st.st_mode & S_IFREG, testedType); } } - Py_END_ALLOW_THREADS - path_cleanup(&_path); - if (result) { - Py_RETURN_TRUE; - } - Py_RETURN_FALSE; + return FALSE; } -/*[clinic input] -os._path_isfile +static BOOL +_testFileExistsByName(LPCWSTR path, BOOL followLinks) +{ + FILE_STAT_BASIC_INFORMATION info; + if (_Py_GetFileInformationByName(path, FileStatBasicByNameInfo, &info, + sizeof(info))) + { + if (!(info.FileAttributes & FILE_ATTRIBUTE_REPARSE_POINT) || + !followLinks && IsReparseTagNameSurrogate(info.ReparseTag)) + { + return TRUE; + } + } + else if (_Py_GetFileInformationByName_ErrorIsTrustworthy( + GetLastError())) + { + return FALSE; + } - path: 'O' + DWORD flags = FILE_FLAG_BACKUP_SEMANTICS; + if (!followLinks) { + flags |= FILE_FLAG_OPEN_REPARSE_POINT; + } + HANDLE hfile = CreateFileW(path, FILE_READ_ATTRIBUTES, 0, NULL, + OPEN_EXISTING, flags, NULL); + if (hfile != INVALID_HANDLE_VALUE) { + if (followLinks) { + CloseHandle(hfile); + return TRUE; + } + // Regular Reparse Points (PY_IFRRP) have to be traversed. + BOOL result = _testFileTypeByHandle(hfile, PY_IFRRP, FALSE); + CloseHandle(hfile); + if (!result) { + return TRUE; + } + hfile = CreateFileW(path, FILE_READ_ATTRIBUTES, 0, NULL, OPEN_EXISTING, + FILE_FLAG_BACKUP_SEMANTICS, NULL); + if (hfile != INVALID_HANDLE_VALUE) { + CloseHandle(hfile); + return TRUE; + } + } -Test whether a path is a regular file + switch (GetLastError()) { + case ERROR_ACCESS_DENIED: + case ERROR_SHARING_VIOLATION: + case ERROR_CANT_ACCESS_FILE: + case ERROR_INVALID_PARAMETER: + STRUCT_STAT _st; + return followLinks ? 
!STAT(path, &_st): !LSTAT(path, &_st); + } -[clinic start generated code]*/ + return FALSE; +} -static PyObject * -os__path_isfile_impl(PyObject *module, PyObject *path) -/*[clinic end generated code: output=2394ed7c4b5cfd85 input=de22d74960ade365]*/ -{ - HANDLE hfile; - BOOL close_file = TRUE; - FILE_BASIC_INFO info; - path_t _path = PATH_T_INITIALIZE("isfile", "path", 0, 1); - int result; - BOOL slow_path = TRUE; - FILE_STAT_BASIC_INFORMATION statInfo; - if (!path_converter(path, &_path)) { - path_cleanup(&_path); - if (PyErr_ExceptionMatches(PyExc_ValueError)) { - PyErr_Clear(); - Py_RETURN_FALSE; - } - return NULL; +static BOOL +_testFileExists(path_t *path, BOOL followLinks) +{ + BOOL result = FALSE; + if (path->value_error) { + return FALSE; } Py_BEGIN_ALLOW_THREADS - if (_path.wide) { - if (_Py_GetFileInformationByName(_path.wide, FileStatBasicByNameInfo, - &statInfo, sizeof(statInfo))) { - if (!(statInfo.FileAttributes & FILE_ATTRIBUTE_REPARSE_POINT)) { - slow_path = FALSE; - result = !(statInfo.FileAttributes & FILE_ATTRIBUTE_DIRECTORY); - } else if (statInfo.FileAttributes & FILE_ATTRIBUTE_DIRECTORY) { - slow_path = FALSE; - result = 0; + if (path->fd != -1) { + HANDLE hfile = _Py_get_osfhandle_noraise(path->fd); + if (hfile != INVALID_HANDLE_VALUE) { + if (GetFileType(hfile) != FILE_TYPE_UNKNOWN || !GetLastError()) { + result = TRUE; } - } else if (_Py_GetFileInformationByName_ErrorIsTrustworthy(GetLastError())) { - slow_path = FALSE; - result = 0; } } - if (slow_path) { - if (_path.fd != -1) { - hfile = _Py_get_osfhandle_noraise(_path.fd); - close_file = FALSE; - } - else { - hfile = CreateFileW(_path.wide, FILE_READ_ATTRIBUTES, 0, NULL, - OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL); - } + else if (path->wide) { + result = _testFileExistsByName(path->wide, followLinks); + } + Py_END_ALLOW_THREADS + + return result; +} + + +static BOOL +_testFileType(path_t *path, int testedType) +{ + BOOL result = FALSE; + if (path->value_error) { + return FALSE; + } + + Py_BEGIN_ALLOW_THREADS + if (path->fd != -1) { + HANDLE hfile = _Py_get_osfhandle_noraise(path->fd); if (hfile != INVALID_HANDLE_VALUE) { - if (GetFileInformationByHandleEx(hfile, FileBasicInfo, &info, - sizeof(info))) - { - result = !(info.FileAttributes & FILE_ATTRIBUTE_DIRECTORY); - } - else { - result = 0; - } - if (close_file) { - CloseHandle(hfile); - } - } - else { - STRUCT_STAT st; - switch (GetLastError()) { - case ERROR_ACCESS_DENIED: - case ERROR_SHARING_VIOLATION: - case ERROR_CANT_ACCESS_FILE: - case ERROR_INVALID_PARAMETER: - if (STAT(_path.wide, &st)) { - result = 0; - } - else { - result = S_ISREG(st.st_mode); - } - break; - default: - result = 0; - } + result = _testFileTypeByHandle(hfile, testedType, TRUE); } } + else if (path->wide) { + result = _testFileTypeByName(path->wide, testedType); + } Py_END_ALLOW_THREADS - path_cleanup(&_path); - if (result) { - Py_RETURN_TRUE; - } - Py_RETURN_FALSE; + return result; } /*[clinic input] -os._path_exists +os._path_exists -> bool - path: 'O' + path: path_t(allow_fd=True, suppress_value_error=True) + / -Test whether a path exists. Returns False for broken symbolic links +Test whether a path exists. Returns False for broken symbolic links. 
[clinic start generated code]*/ -static PyObject * -os__path_exists_impl(PyObject *module, PyObject *path) -/*[clinic end generated code: output=f508c3b35e13a249 input=380f77cdfa0f7ae8]*/ +static int +os__path_exists_impl(PyObject *module, path_t *path) +/*[clinic end generated code: output=8da13acf666e16ba input=29198507a6082a57]*/ { - HANDLE hfile; - BOOL close_file = TRUE; - path_t _path = PATH_T_INITIALIZE("exists", "path", 0, 1); - int result; - BOOL slow_path = TRUE; - FILE_STAT_BASIC_INFORMATION statInfo; + return _testFileExists(path, TRUE); +} - if (!path_converter(path, &_path)) { - path_cleanup(&_path); - if (PyErr_ExceptionMatches(PyExc_ValueError)) { - PyErr_Clear(); - Py_RETURN_FALSE; - } - return NULL; - } - Py_BEGIN_ALLOW_THREADS - if (_path.wide) { - if (_Py_GetFileInformationByName(_path.wide, FileStatBasicByNameInfo, - &statInfo, sizeof(statInfo))) { - if (!(statInfo.FileAttributes & FILE_ATTRIBUTE_REPARSE_POINT)) { - slow_path = FALSE; - result = 1; - } - } else if (_Py_GetFileInformationByName_ErrorIsTrustworthy(GetLastError())) { - slow_path = FALSE; - result = 0; - } - } - if (slow_path) { - if (_path.fd != -1) { - hfile = _Py_get_osfhandle_noraise(_path.fd); - close_file = FALSE; - } - else { - hfile = CreateFileW(_path.wide, FILE_READ_ATTRIBUTES, 0, NULL, - OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL); - } - if (hfile != INVALID_HANDLE_VALUE) { - result = 1; - if (close_file) { - CloseHandle(hfile); - } - } - else { - STRUCT_STAT st; - switch (GetLastError()) { - case ERROR_ACCESS_DENIED: - case ERROR_SHARING_VIOLATION: - case ERROR_CANT_ACCESS_FILE: - case ERROR_INVALID_PARAMETER: - if (STAT(_path.wide, &st)) { - result = 0; - } - else { - result = 1; - } - break; - default: - result = 0; - } - } - } - Py_END_ALLOW_THREADS +/*[clinic input] +os._path_lexists -> bool - path_cleanup(&_path); - if (result) { - Py_RETURN_TRUE; - } - Py_RETURN_FALSE; + path: path_t(allow_fd=True, suppress_value_error=True) + / + +Test whether a path exists. Returns True for broken symbolic links. + +[clinic start generated code]*/ + +static int +os__path_lexists_impl(PyObject *module, path_t *path) +/*[clinic end generated code: output=e7240ed5fc45bff3 input=03d9fed8bc6ce96f]*/ +{ + return _testFileExists(path, FALSE); +} + + +/*[clinic input] +os._path_isdir -> bool + + s as path: path_t(allow_fd=True, suppress_value_error=True) + +Return true if the pathname refers to an existing directory. 
+ +[clinic start generated code]*/ + +static int +os__path_isdir_impl(PyObject *module, path_t *path) +/*[clinic end generated code: output=d5786196f9e2fa7a input=132a3b5301aecf79]*/ +{ + return _testFileType(path, PY_IFDIR); } /*[clinic input] -os._path_islink +os._path_isfile -> bool - path: 'O' + path: path_t(allow_fd=True, suppress_value_error=True) + +Test whether a path is a regular file + +[clinic start generated code]*/ + +static int +os__path_isfile_impl(PyObject *module, path_t *path) +/*[clinic end generated code: output=5c3073bc212b9863 input=4ac1fd350b30a39e]*/ +{ + return _testFileType(path, PY_IFREG); +} + + +/*[clinic input] +os._path_islink -> bool + + path: path_t(allow_fd=True, suppress_value_error=True) Test whether a path is a symbolic link [clinic start generated code]*/ -static PyObject * -os__path_islink_impl(PyObject *module, PyObject *path) -/*[clinic end generated code: output=6d8640b1a390c054 input=38a3cb937ccf59bf]*/ +static int +os__path_islink_impl(PyObject *module, path_t *path) +/*[clinic end generated code: output=30da7bda8296adcc input=7510ce05b547debb]*/ { - HANDLE hfile; - BOOL close_file = TRUE; - FILE_ATTRIBUTE_TAG_INFO info; - path_t _path = PATH_T_INITIALIZE("islink", "path", 0, 1); - int result; - BOOL slow_path = TRUE; - FILE_STAT_BASIC_INFORMATION statInfo; + return _testFileType(path, PY_IFLNK); +} - if (!path_converter(path, &_path)) { - path_cleanup(&_path); - if (PyErr_ExceptionMatches(PyExc_ValueError)) { - PyErr_Clear(); - Py_RETURN_FALSE; - } - return NULL; - } - Py_BEGIN_ALLOW_THREADS - if (_path.wide) { - if (_Py_GetFileInformationByName(_path.wide, FileStatBasicByNameInfo, - &statInfo, sizeof(statInfo))) { - slow_path = FALSE; - if (statInfo.FileAttributes & FILE_ATTRIBUTE_REPARSE_POINT) { - result = (statInfo.ReparseTag == IO_REPARSE_TAG_SYMLINK); - } - else { - result = 0; - } - } else if (_Py_GetFileInformationByName_ErrorIsTrustworthy(GetLastError())) { - slow_path = FALSE; - result = 0; - } - } - if (slow_path) { - if (_path.fd != -1) { - hfile = _Py_get_osfhandle_noraise(_path.fd); - close_file = FALSE; - } - else { - hfile = CreateFileW(_path.wide, FILE_READ_ATTRIBUTES, 0, NULL, - OPEN_EXISTING, - FILE_FLAG_OPEN_REPARSE_POINT | FILE_FLAG_BACKUP_SEMANTICS, - NULL); - } - if (hfile != INVALID_HANDLE_VALUE) { - if (GetFileInformationByHandleEx(hfile, FileAttributeTagInfo, &info, - sizeof(info))) - { - result = (info.ReparseTag == IO_REPARSE_TAG_SYMLINK); - } - else { - result = 0; - } - if (close_file) { - CloseHandle(hfile); - } - } - else { - STRUCT_STAT st; - switch (GetLastError()) { - case ERROR_ACCESS_DENIED: - case ERROR_SHARING_VIOLATION: - case ERROR_CANT_ACCESS_FILE: - case ERROR_INVALID_PARAMETER: - if (LSTAT(_path.wide, &st)) { - result = 0; - } - else { - result = S_ISLNK(st.st_mode); - } - break; - default: - result = 0; - } - } - } - Py_END_ALLOW_THREADS +/*[clinic input] +os._path_isjunction -> bool - path_cleanup(&_path); - if (result) { - Py_RETURN_TRUE; - } - Py_RETURN_FALSE; + path: path_t(allow_fd=True, suppress_value_error=True) + +Test whether a path is a junction + +[clinic start generated code]*/ + +static int +os__path_isjunction_impl(PyObject *module, path_t *path) +/*[clinic end generated code: output=e1d17a9dd18a9945 input=7dcb8bc4e972fcaf]*/ +{ + return _testFileType(path, PY_IFMNT); } +#undef PY_IFREG +#undef PY_IFDIR +#undef PY_IFLNK +#undef PY_IFMNT +#undef PY_IFLRP +#undef PY_IFRRP + #endif /* MS_WINDOWS */ /*[clinic input] os._path_splitroot_ex - path: unicode + path: path_t(make_wide=True, 
nonstrict=True) +Split a pathname into drive, root and tail. + +The tail contains anything after the root. [clinic start generated code]*/ static PyObject * -os__path_splitroot_ex_impl(PyObject *module, PyObject *path) -/*[clinic end generated code: output=de97403d3dfebc40 input=f1470e12d899f9ac]*/ +os__path_splitroot_ex_impl(PyObject *module, path_t *path) +/*[clinic end generated code: output=4b0072b6cdf4b611 input=6eb76e9173412c92]*/ { - Py_ssize_t len, drvsize, rootsize; + Py_ssize_t drvsize, rootsize; PyObject *drv = NULL, *root = NULL, *tail = NULL, *result = NULL; - wchar_t *buffer = PyUnicode_AsWideCharString(path, &len); - if (!buffer) { - goto exit; - } - - _Py_skiproot(buffer, len, &drvsize, &rootsize); + const wchar_t *buffer = path->wide; + _Py_skiproot(buffer, path->length, &drvsize, &rootsize); drv = PyUnicode_FromWideChar(buffer, drvsize); if (drv == NULL) { goto exit; @@ -5498,13 +5522,26 @@ os__path_splitroot_ex_impl(PyObject *module, PyObject *path) goto exit; } tail = PyUnicode_FromWideChar(&buffer[drvsize + rootsize], - len - drvsize - rootsize); + path->length - drvsize - rootsize); if (tail == NULL) { goto exit; } + if (PyBytes_Check(path->object)) { + Py_SETREF(drv, PyUnicode_EncodeFSDefault(drv)); + if (drv == NULL) { + goto exit; + } + Py_SETREF(root, PyUnicode_EncodeFSDefault(root)); + if (root == NULL) { + goto exit; + } + Py_SETREF(tail, PyUnicode_EncodeFSDefault(tail)); + if (tail == NULL) { + goto exit; + } + } result = PyTuple_Pack(3, drv, root, tail); exit: - PyMem_Free(buffer); Py_XDECREF(drv); Py_XDECREF(root); Py_XDECREF(tail); @@ -5515,172 +5552,31 @@ os__path_splitroot_ex_impl(PyObject *module, PyObject *path) /*[clinic input] os._path_normpath - path: object + path: path_t(make_wide=True, nonstrict=True) -Basic path normalization. +Normalize path, eliminating double slashes, etc. [clinic start generated code]*/ static PyObject * -os__path_normpath_impl(PyObject *module, PyObject *path) -/*[clinic end generated code: output=b94d696d828019da input=5e90c39e12549dc0]*/ +os__path_normpath_impl(PyObject *module, path_t *path) +/*[clinic end generated code: output=d353e7ed9410c044 input=3d4ac23b06332dcb]*/ { - if (!PyUnicode_Check(path)) { - PyErr_Format(PyExc_TypeError, "expected 'str', not '%.200s'", - Py_TYPE(path)->tp_name); - return NULL; - } - Py_ssize_t len; - wchar_t *buffer = PyUnicode_AsWideCharString(path, &len); - if (!buffer) { - return NULL; - } + PyObject *result; Py_ssize_t norm_len; - wchar_t *norm_path = _Py_normpath_and_size(buffer, len, &norm_len); - PyObject *result = PyUnicode_FromWideChar(norm_path, norm_len); - PyMem_Free(buffer); - return result; -} - -#ifdef MS_WINDOWS - -/* We centralise SECURITY_ATTRIBUTE initialization based around -templates that will probably mostly match common POSIX mode settings. -The _Py_SECURITY_ATTRIBUTE_DATA structure contains temporary data, as -a constructed SECURITY_ATTRIBUTE structure typically refers to memory -that has to be alive while it's being used. - -Typical use will look like: - SECURITY_ATTRIBUTES *pSecAttr = NULL; - struct _Py_SECURITY_ATTRIBUTE_DATA secAttrData; - int error, error2; - - Py_BEGIN_ALLOW_THREADS - switch (mode) { - case 0x1C0: // 0o700 - error = initializeMkdir700SecurityAttributes(&pSecAttr, &secAttrData); - break; - ... - default: - error = initializeDefaultSecurityAttributes(&pSecAttr, &secAttrData); - break; - } - - if (!error) { - // do operation, passing pSecAttr - } - - // Unconditionally clear secAttrData. 
- error2 = clearSecurityAttributes(&pSecAttr, &secAttrData); - if (!error) { - error = error2; + wchar_t *norm_path = _Py_normpath_and_size((wchar_t *)path->wide, + path->length, &norm_len); + if (!norm_len) { + result = PyUnicode_FromOrdinal('.'); } - Py_END_ALLOW_THREADS - - if (error) { - PyErr_SetFromWindowsErr(error); - return NULL; - } -*/ - -struct _Py_SECURITY_ATTRIBUTE_DATA { - SECURITY_ATTRIBUTES securityAttributes; - PACL acl; - SECURITY_DESCRIPTOR sd; - EXPLICIT_ACCESS_W ea[4]; - char sid[64]; -}; - -static int -initializeDefaultSecurityAttributes( - PSECURITY_ATTRIBUTES *securityAttributes, - struct _Py_SECURITY_ATTRIBUTE_DATA *data -) { - assert(securityAttributes); - assert(data); - *securityAttributes = NULL; - memset(data, 0, sizeof(*data)); - return 0; -} - -static int -initializeMkdir700SecurityAttributes( - PSECURITY_ATTRIBUTES *securityAttributes, - struct _Py_SECURITY_ATTRIBUTE_DATA *data -) { - assert(securityAttributes); - assert(data); - *securityAttributes = NULL; - memset(data, 0, sizeof(*data)); - - if (!InitializeSecurityDescriptor(&data->sd, SECURITY_DESCRIPTOR_REVISION) - || !SetSecurityDescriptorGroup(&data->sd, NULL, TRUE)) { - return GetLastError(); - } - - int use_alias = 0; - DWORD cbSid = sizeof(data->sid); - if (!CreateWellKnownSid(WinCreatorOwnerRightsSid, NULL, (PSID)data->sid, &cbSid)) { - use_alias = 1; - } - - data->securityAttributes.nLength = sizeof(SECURITY_ATTRIBUTES); - data->ea[0].grfAccessPermissions = GENERIC_ALL; - data->ea[0].grfAccessMode = SET_ACCESS; - data->ea[0].grfInheritance = SUB_CONTAINERS_AND_OBJECTS_INHERIT; - if (use_alias) { - data->ea[0].Trustee.TrusteeForm = TRUSTEE_IS_NAME; - data->ea[0].Trustee.TrusteeType = TRUSTEE_IS_ALIAS; - data->ea[0].Trustee.ptstrName = L"CURRENT_USER"; - } else { - data->ea[0].Trustee.TrusteeForm = TRUSTEE_IS_SID; - data->ea[0].Trustee.TrusteeType = TRUSTEE_IS_WELL_KNOWN_GROUP; - data->ea[0].Trustee.ptstrName = (LPWCH)(SID*)data->sid; - } - - data->ea[1].grfAccessPermissions = GENERIC_ALL; - data->ea[1].grfAccessMode = SET_ACCESS; - data->ea[1].grfInheritance = SUB_CONTAINERS_AND_OBJECTS_INHERIT; - data->ea[1].Trustee.TrusteeForm = TRUSTEE_IS_NAME; - data->ea[1].Trustee.TrusteeType = TRUSTEE_IS_ALIAS; - data->ea[1].Trustee.ptstrName = L"SYSTEM"; - - data->ea[2].grfAccessPermissions = GENERIC_ALL; - data->ea[2].grfAccessMode = SET_ACCESS; - data->ea[2].grfInheritance = SUB_CONTAINERS_AND_OBJECTS_INHERIT; - data->ea[2].Trustee.TrusteeForm = TRUSTEE_IS_NAME; - data->ea[2].Trustee.TrusteeType = TRUSTEE_IS_ALIAS; - data->ea[2].Trustee.ptstrName = L"ADMINISTRATORS"; - - int r = SetEntriesInAclW(3, data->ea, NULL, &data->acl); - if (r) { - return r; - } - if (!SetSecurityDescriptorDacl(&data->sd, TRUE, data->acl, FALSE)) { - return GetLastError(); + else { + result = PyUnicode_FromWideChar(norm_path, norm_len); } - data->securityAttributes.lpSecurityDescriptor = &data->sd; - *securityAttributes = &data->securityAttributes; - return 0; -} - -static int -clearSecurityAttributes( - PSECURITY_ATTRIBUTES *securityAttributes, - struct _Py_SECURITY_ATTRIBUTE_DATA *data -) { - assert(securityAttributes); - assert(data); - *securityAttributes = NULL; - if (data->acl) { - if (LocalFree((void *)data->acl)) { - return GetLastError(); - } + if (PyBytes_Check(path->object)) { + Py_SETREF(result, PyUnicode_EncodeFSDefault(result)); } - return 0; + return result; } -#endif - /*[clinic input] os.mkdir @@ -5713,8 +5609,8 @@ os_mkdir_impl(PyObject *module, path_t *path, int mode, int dir_fd) #ifdef MS_WINDOWS int error = 
0; int pathError = 0; + SECURITY_ATTRIBUTES secAttr = { sizeof(secAttr) }; SECURITY_ATTRIBUTES *pSecAttr = NULL; - struct _Py_SECURITY_ATTRIBUTE_DATA secAttrData; #endif #ifdef HAVE_MKDIRAT int mkdirat_unavailable = 0; @@ -5727,26 +5623,34 @@ os_mkdir_impl(PyObject *module, path_t *path, int mode, int dir_fd) #ifdef MS_WINDOWS Py_BEGIN_ALLOW_THREADS - switch (mode) { - case 0x1C0: // 0o700 - error = initializeMkdir700SecurityAttributes(&pSecAttr, &secAttrData); - break; - default: - error = initializeDefaultSecurityAttributes(&pSecAttr, &secAttrData); - break; + if (mode == 0700 /* 0o700 */) { + ULONG sdSize; + pSecAttr = &secAttr; + // Set a discretionary ACL (D) that is protected (P) and includes + // inheritable (OICI) entries that allow (A) full control (FA) to + // SYSTEM (SY), Administrators (BA), and the owner (OW). + if (!ConvertStringSecurityDescriptorToSecurityDescriptorW( + L"D:P(A;OICI;FA;;;SY)(A;OICI;FA;;;BA)(A;OICI;FA;;;OW)", + SDDL_REVISION_1, + &secAttr.lpSecurityDescriptor, + &sdSize + )) { + error = GetLastError(); + } } if (!error) { result = CreateDirectoryW(path->wide, pSecAttr); - error = clearSecurityAttributes(&pSecAttr, &secAttrData); - } else { - // Ignore error from "clear" - we have a more interesting one already - clearSecurityAttributes(&pSecAttr, &secAttrData); + if (secAttr.lpSecurityDescriptor && + // uncommonly, LocalFree returns non-zero on error, but still uses + // GetLastError() to see what the error code is + LocalFree(secAttr.lpSecurityDescriptor)) { + error = GetLastError(); + } } Py_END_ALLOW_THREADS if (error) { - PyErr_SetFromWindowsErr(error); - return NULL; + return PyErr_SetFromWindowsErr(error); } if (!result) { return path_error(path); @@ -10396,7 +10300,7 @@ os_readlink_impl(PyObject *module, path_t *path, int dir_fd) name[1] = L'\\'; } result = PyUnicode_FromWideChar(name, nameLen); - if (result && path->narrow) { + if (result && PyBytes_Check(path->object)) { Py_SETREF(result, PyUnicode_EncodeFSDefault(result)); } } @@ -12637,9 +12541,31 @@ os_mknod_impl(PyObject *module, path_t *path, int mode, dev_t device, #endif /* defined(HAVE_MKNOD) && defined(HAVE_MAKEDEV) */ +static PyObject * +major_minor_conv(unsigned int value) +{ +#ifdef NODEV + if (value == (unsigned int)NODEV) { + return PyLong_FromLong((int)NODEV); + } +#endif + return PyLong_FromUnsignedLong(value); +} + +static int +major_minor_check(dev_t value) +{ +#ifdef NODEV + if (value == NODEV) { + return 1; + } +#endif + return (dev_t)(unsigned int)value == value; +} + #ifdef HAVE_DEVICE_MACROS /*[clinic input] -os.major -> unsigned_int +os.major device: dev_t / @@ -12647,16 +12573,16 @@ os.major -> unsigned_int Extracts a device major number from a raw device number. [clinic start generated code]*/ -static unsigned int +static PyObject * os_major_impl(PyObject *module, dev_t device) -/*[clinic end generated code: output=5b3b2589bafb498e input=1e16a4d30c4d4462]*/ +/*[clinic end generated code: output=4071ffee17647891 input=b1a0a14ec9448229]*/ { - return major(device); + return major_minor_conv(major(device)); } /*[clinic input] -os.minor -> unsigned_int +os.minor device: dev_t / @@ -12664,28 +12590,33 @@ os.minor -> unsigned_int Extracts a device minor number from a raw device number. 
[clinic start generated code]*/ -static unsigned int +static PyObject * os_minor_impl(PyObject *module, dev_t device) -/*[clinic end generated code: output=5e1a25e630b0157d input=0842c6d23f24c65e]*/ +/*[clinic end generated code: output=306cb78e3bc5004f input=2f686e463682a9da]*/ { - return minor(device); + return major_minor_conv(minor(device)); } /*[clinic input] os.makedev -> dev_t - major: int - minor: int + major: dev_t + minor: dev_t / Composes a raw device number from the major and minor device numbers. [clinic start generated code]*/ static dev_t -os_makedev_impl(PyObject *module, int major, int minor) -/*[clinic end generated code: output=881aaa4aba6f6a52 input=4b9fd8fc73cbe48f]*/ +os_makedev_impl(PyObject *module, dev_t major, dev_t minor) +/*[clinic end generated code: output=cad6125c51f5af80 input=2146126ec02e55c1]*/ { + if (!major_minor_check(major) || !major_minor_check(minor)) { + PyErr_SetString(PyExc_OverflowError, + "Python int too large to convert to C unsigned int"); + return (dev_t)-1; + } return makedev(major, minor); } #endif /* HAVE_DEVICE_MACROS */ @@ -16017,7 +15948,8 @@ DirEntry_from_find_data(PyObject *module, path_t *path, WIN32_FIND_DATAW *dataW) entry->name = PyUnicode_FromWideChar(dataW->cFileName, -1); if (!entry->name) goto error; - if (path->narrow) { + int return_bytes = path->wide && PyBytes_Check(path->object); + if (return_bytes) { Py_SETREF(entry->name, PyUnicode_EncodeFSDefault(entry->name)); if (!entry->name) goto error; @@ -16031,7 +15963,7 @@ DirEntry_from_find_data(PyObject *module, path_t *path, WIN32_FIND_DATAW *dataW) PyMem_Free(joined_path); if (!entry->path) goto error; - if (path->narrow) { + if (return_bytes) { Py_SETREF(entry->path, PyUnicode_EncodeFSDefault(entry->path)); if (!entry->path) goto error; @@ -16845,6 +16777,37 @@ os__supports_virtual_terminal_impl(PyObject *module) } #endif +/*[clinic input] +os._inputhook + +Calls PyOS_CallInputHook droppong the GIL first +[clinic start generated code]*/ + +static PyObject * +os__inputhook_impl(PyObject *module) +/*[clinic end generated code: output=525aca4ef3c6149f input=fc531701930d064f]*/ +{ + int result = 0; + if (PyOS_InputHook) { + Py_BEGIN_ALLOW_THREADS; + result = PyOS_InputHook(); + Py_END_ALLOW_THREADS; + } + return PyLong_FromLong(result); +} + +/*[clinic input] +os._is_inputhook_installed + +Checks if PyOS_CallInputHook is set +[clinic start generated code]*/ + +static PyObject * +os__is_inputhook_installed_impl(PyObject *module) +/*[clinic end generated code: output=3b3eab4f672c689a input=ff177c9938dd76d8]*/ +{ + return PyBool_FromLong(PyOS_InputHook != NULL); +} static PyMethodDef posix_methods[] = { @@ -17053,9 +17016,13 @@ static PyMethodDef posix_methods[] = { OS__PATH_ISDIR_METHODDEF OS__PATH_ISFILE_METHODDEF OS__PATH_ISLINK_METHODDEF + OS__PATH_ISJUNCTION_METHODDEF OS__PATH_EXISTS_METHODDEF + OS__PATH_LEXISTS_METHODDEF OS__SUPPORTS_VIRTUAL_TERMINAL_METHODDEF + OS__INPUTHOOK_METHODDEF + OS__IS_INPUTHOOK_INSTALLED_METHODDEF {NULL, NULL} /* Sentinel */ }; diff --git a/Modules/timemodule.c b/Modules/timemodule.c index 0511339978897a..ed2d32688ecea5 100644 --- a/Modules/timemodule.c +++ b/Modules/timemodule.c @@ -462,7 +462,18 @@ tmtotuple(time_module_state *state, struct tm *p if (v == NULL) return NULL; -#define SET(i,val) PyStructSequence_SET_ITEM(v, i, PyLong_FromLong((long) val)) +#define SET_ITEM(INDEX, CALL) \ + do { \ + PyObject *obj = (CALL); \ + if (obj == NULL) { \ + Py_DECREF(v); \ + return NULL; \ + } \ + PyStructSequence_SET_ITEM(v, (INDEX), obj); \ + } while (0) 
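/* Editorial aside -- not part of the patch.  The SET_ITEM macro above bails
   out as soon as a conversion returns NULL, where the old SET macro stored
   a possibly-NULL item and relied on a trailing PyErr_Occurred() check.
   A minimal self-contained sketch of the same pattern, shown with a plain
   tuple; build_pair() is a hypothetical helper, not CPython code: */

#include <Python.h>

static PyObject *
build_pair(long a, long b)
{
    PyObject *v = PyTuple_New(2);
    if (v == NULL) {
        return NULL;
    }
#define GUARDED_SET(i, call)                                \
    do {                                                    \
        PyObject *obj = (call);                             \
        if (obj == NULL) {      /* conversion failed */     \
            Py_DECREF(v);       /* drop the container */    \
            return NULL;                                    \
        }                                                   \
        PyTuple_SET_ITEM(v, (i), obj);  /* steals obj */    \
    } while (0)
    GUARDED_SET(0, PyLong_FromLong(a));
    GUARDED_SET(1, PyLong_FromLong(b));
#undef GUARDED_SET
    return v;
}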
+ +#define SET(INDEX, VAL) \ + SET_ITEM((INDEX), PyLong_FromLong((long) (VAL))) SET(0, p->tm_year + 1900); SET(1, p->tm_mon + 1); /* Want January == 1 */ @@ -474,19 +485,15 @@ tmtotuple(time_module_state *state, struct tm *p SET(7, p->tm_yday + 1); /* Want January, 1 == 1 */ SET(8, p->tm_isdst); #ifdef HAVE_STRUCT_TM_TM_ZONE - PyStructSequence_SET_ITEM(v, 9, - PyUnicode_DecodeLocale(p->tm_zone, "surrogateescape")); + SET_ITEM(9, PyUnicode_DecodeLocale(p->tm_zone, "surrogateescape")); SET(10, p->tm_gmtoff); #else - PyStructSequence_SET_ITEM(v, 9, - PyUnicode_DecodeLocale(zone, "surrogateescape")); - PyStructSequence_SET_ITEM(v, 10, _PyLong_FromTime_t(gmtoff)); + SET_ITEM(9, PyUnicode_DecodeLocale(zone, "surrogateescape")); + SET_ITEM(10, _PyLong_FromTime_t(gmtoff)); #endif /* HAVE_STRUCT_TM_TM_ZONE */ + #undef SET - if (PyErr_Occurred()) { - Py_XDECREF(v); - return NULL; - } +#undef SET_ITEM return v; } diff --git a/Objects/clinic/complexobject.c.h b/Objects/clinic/complexobject.c.h index 49b50304021f7b..46c3b352562445 100644 --- a/Objects/clinic/complexobject.c.h +++ b/Objects/clinic/complexobject.c.h @@ -94,9 +94,12 @@ PyDoc_STRVAR(complex_new__doc__, "complex(real=0, imag=0)\n" "--\n" "\n" -"Create a complex number from a real part and an optional imaginary part.\n" +"Create a complex number from a string or numbers.\n" "\n" -"This is equivalent to (real + imag*1j) where imag defaults to 0."); +"If a string is given, parse it as a complex number.\n" +"If a single number is given, convert it to a complex number.\n" +"If the \'real\' or \'imag\' arguments are given, create a complex number\n" +"with the specified real and imaginary components."); static PyObject * complex_new_impl(PyTypeObject *type, PyObject *r, PyObject *i); @@ -157,4 +160,4 @@ complex_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=04e6261649967b30 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=295ecfd71389d7fe input=a9049054013a1b77]*/ diff --git a/Objects/codeobject.c b/Objects/codeobject.c index 3d804f73a54088..55b512b3f1abde 100644 --- a/Objects/codeobject.c +++ b/Objects/codeobject.c @@ -110,7 +110,7 @@ should_intern_string(PyObject *o) // unless we've disabled immortalizing objects that use deferred reference // counting. 
PyInterpreterState *interp = _PyInterpreterState_GET(); - if (interp->gc.immortalize.enable_on_thread_created) { + if (_Py_atomic_load_int(&interp->gc.immortalize) < 0) { return 1; } #endif @@ -240,7 +240,7 @@ intern_constants(PyObject *tuple, int *modified) PyThreadState *tstate = PyThreadState_GET(); if (!_Py_IsImmortal(v) && !PyCode_Check(v) && !PyUnicode_CheckExact(v) && - tstate->interp->gc.immortalize.enable_on_thread_created) + _Py_atomic_load_int(&tstate->interp->gc.immortalize) >= 0) { PyObject *interned = intern_one_constant(v); if (interned == NULL) { @@ -2020,7 +2020,7 @@ code_hash(PyCodeObject *co) Py_uhash_t uhash = 20221211; #define SCRAMBLE_IN(H) do { \ uhash ^= (Py_uhash_t)(H); \ - uhash *= _PyHASH_MULTIPLIER; \ + uhash *= PyHASH_MULTIPLIER; \ } while (0) #define SCRAMBLE_IN_HASH(EXPR) do { \ Py_hash_t h = PyObject_Hash(EXPR); \ diff --git a/Objects/complexobject.c b/Objects/complexobject.c index d8b0e84da5df4a..1aa3960a1c63f9 100644 --- a/Objects/complexobject.c +++ b/Objects/complexobject.c @@ -911,14 +911,17 @@ complex.__new__ as complex_new real as r: object(c_default="NULL") = 0 imag as i: object(c_default="NULL") = 0 -Create a complex number from a real part and an optional imaginary part. +Create a complex number from a string or numbers. -This is equivalent to (real + imag*1j) where imag defaults to 0. +If a string is given, parse it as a complex number. +If a single number is given, convert it to a complex number. +If the 'real' or 'imag' arguments are given, create a complex number +with the specified real and imaginary components. [clinic start generated code]*/ static PyObject * complex_new_impl(PyTypeObject *type, PyObject *r, PyObject *i) -/*[clinic end generated code: output=b6c7dd577b537dc1 input=f4c667f2596d4fd1]*/ +/*[clinic end generated code: output=b6c7dd577b537dc1 input=ff4268dc540958a4]*/ { PyObject *tmp; PyNumberMethods *nbr, *nbi = NULL; diff --git a/Objects/dictobject.c b/Objects/dictobject.c index b0fce09d7940e0..6e1c3b93fd391b 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -2003,7 +2003,7 @@ dictresize(PyInterpreterState *interp, PyDictObject *mp, if (oldvalues->embedded) { assert(oldvalues->embedded == 1); assert(oldvalues->valid == 1); - oldvalues->valid = 0; + FT_ATOMIC_STORE_UINT8(oldvalues->valid, 0); } else { free_values(oldvalues, IS_DICT_SHARED(mp)); @@ -5396,6 +5396,7 @@ static int dictiter_iternext_threadsafe(PyDictObject *d, PyObject *self, PyObject **out_key, PyObject **out_value) { + int res; dictiterobject *di = (dictiterobject *)self; Py_ssize_t i; PyDictKeysObject *k; @@ -5491,7 +5492,6 @@ dictiter_iternext_threadsafe(PyDictObject *d, PyObject *self, Py_DECREF(d); return -1; - int res; try_locked: Py_BEGIN_CRITICAL_SECTION(d); res = dictiter_iternextitem_lock_held(d, self, out_key, out_value); diff --git a/Objects/exceptions.c b/Objects/exceptions.c index 63c461d34fb4ff..f27ca2f5ddfb5e 100644 --- a/Objects/exceptions.c +++ b/Objects/exceptions.c @@ -3685,7 +3685,7 @@ _PyExc_FiniTypes(PyInterpreterState *interp) { for (Py_ssize_t i=Py_ARRAY_LENGTH(static_exceptions) - 1; i >= 0; i--) { PyTypeObject *exc = static_exceptions[i].exc; - _PyStaticType_Dealloc(interp, exc); + _PyStaticType_FiniBuiltin(interp, exc); } } diff --git a/Objects/frameobject.c b/Objects/frameobject.c index d7fcb1925d286c..0465aaa01bb38c 100644 --- a/Objects/frameobject.c +++ b/Objects/frameobject.c @@ -637,6 +637,23 @@ framelocalsproxy_setdefault(PyObject* self, PyObject *const *args, Py_ssize_t na return result; } +static PyObject* 
+framelocalsproxy_copy(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + PyObject* result = PyDict_New(); + + if (result == NULL) { + return NULL; + } + + if (PyDict_Update(result, self) < 0) { + Py_DECREF(result); + return NULL; + } + + return result; +} + static PyObject* framelocalsproxy_reversed(PyObject *self, void *Py_UNUSED(ignored)) { @@ -677,6 +694,8 @@ static PyMethodDef framelocalsproxy_methods[] = { NULL}, {"__reversed__", _PyCFunction_CAST(framelocalsproxy_reversed), METH_NOARGS, NULL}, + {"copy", _PyCFunction_CAST(framelocalsproxy_copy), METH_NOARGS, + NULL}, {"keys", _PyCFunction_CAST(framelocalsproxy_keys), METH_NOARGS, NULL}, {"values", _PyCFunction_CAST(framelocalsproxy_values), METH_NOARGS, @@ -1900,8 +1919,7 @@ frame_get_var(_PyInterpreterFrame *frame, PyCodeObject *co, int i, } // (likely) Otherwise it is an arg (kind & CO_FAST_LOCAL), // with the initial value set when the frame was created... - // (unlikely) ...or it was set to some initial value by - // an earlier call to PyFrame_LocalsToFast(). + // (unlikely) ...or it was set via the f_locals proxy. } } } @@ -2014,18 +2032,24 @@ PyFrame_GetVarString(PyFrameObject *frame, const char *name) int PyFrame_FastToLocalsWithError(PyFrameObject *f) { + // Nothing to do here, as f_locals is now a write-through proxy in + // optimized frames. Soft-deprecated, since there's no maintenance hassle. return 0; } void PyFrame_FastToLocals(PyFrameObject *f) { + // Nothing to do here, as f_locals is now a write-through proxy in + // optimized frames. Soft-deprecated, since there's no maintenance hassle. return; } void PyFrame_LocalsToFast(PyFrameObject *f, int clear) { + // Nothing to do here, as f_locals is now a write-through proxy in + // optimized frames. Soft-deprecated, since there's no maintenance hassle. 
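/* Editorial aside -- not part of the patch.  Because f_locals is now a
   write-through proxy, a mutation made through it from C reaches the
   running frame directly, which is why these compatibility functions are
   reduced to no-ops.  A hedged sketch of driving the proxy from C;
   set_frame_local() is a hypothetical helper, not CPython code: */

#include <Python.h>

static int
set_frame_local(PyFrameObject *frame, const char *name, PyObject *value)
{
    PyObject *proxy = PyObject_GetAttrString((PyObject *)frame, "f_locals");
    if (proxy == NULL) {
        return -1;
    }
    /* Writes straight into the frame; no PyFrame_LocalsToFast() needed. */
    int res = PyMapping_SetItemString(proxy, name, value);
    Py_DECREF(proxy);
    return res;
}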
return; } diff --git a/Objects/listobject.c b/Objects/listobject.c index 3c4e2d2e6ed7de..d09bb6391034d1 100644 --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -192,6 +192,7 @@ list_preallocate_exact(PyListObject *self, Py_ssize_t size) return -1; } items = array->ob_item; + memset(items, 0, size * sizeof(PyObject *)); #else items = PyMem_New(PyObject*, size); if (items == NULL) { @@ -199,7 +200,7 @@ list_preallocate_exact(PyListObject *self, Py_ssize_t size) return -1; } #endif - self->ob_item = items; + FT_ATOMIC_STORE_PTR_RELEASE(self->ob_item, items); self->allocated = size; return 0; } @@ -350,7 +351,11 @@ list_item_impl(PyListObject *self, Py_ssize_t idx) if (!valid_index(idx, size)) { goto exit; } +#ifdef Py_GIL_DISABLED + item = _Py_NewRefWithLock(self->ob_item[idx]); +#else item = Py_NewRef(self->ob_item[idx]); +#endif exit: Py_END_CRITICAL_SECTION(); return item; @@ -655,14 +660,15 @@ list_item(PyObject *aa, Py_ssize_t i) return NULL; } PyObject *item; - Py_BEGIN_CRITICAL_SECTION(a); #ifdef Py_GIL_DISABLED - if (!_Py_IsOwnedByCurrentThread((PyObject *)a) && !_PyObject_GC_IS_SHARED(a)) { - _PyObject_GC_SET_SHARED(a); + item = list_get_item_ref(a, i); + if (item == NULL) { + PyErr_SetObject(PyExc_IndexError, &_Py_STR(list_err)); + return NULL; } -#endif +#else item = Py_NewRef(a->ob_item[i]); - Py_END_CRITICAL_SECTION(); +#endif return item; } diff --git a/Objects/object.c b/Objects/object.c index 8ad0389cbc7626..1127fedc3f0c44 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -401,24 +401,27 @@ Py_ssize_t _Py_ExplicitMergeRefcount(PyObject *op, Py_ssize_t extra) { assert(!_Py_IsImmortal(op)); + +#ifdef Py_REF_DEBUG + _Py_AddRefTotal(_PyThreadState_GET(), extra); +#endif + + // gh-119999: Write to ob_ref_local and ob_tid before merging the refcount. + Py_ssize_t local = (Py_ssize_t)op->ob_ref_local; + _Py_atomic_store_uint32_relaxed(&op->ob_ref_local, 0); + _Py_atomic_store_uintptr_relaxed(&op->ob_tid, 0); + Py_ssize_t refcnt; Py_ssize_t new_shared; Py_ssize_t shared = _Py_atomic_load_ssize_relaxed(&op->ob_ref_shared); do { refcnt = Py_ARITHMETIC_RIGHT_SHIFT(Py_ssize_t, shared, _Py_REF_SHARED_SHIFT); - refcnt += (Py_ssize_t)op->ob_ref_local; + refcnt += local; refcnt += extra; new_shared = _Py_REF_SHARED(refcnt, _Py_REF_MERGED); } while (!_Py_atomic_compare_exchange_ssize(&op->ob_ref_shared, &shared, new_shared)); - -#ifdef Py_REF_DEBUG - _Py_AddRefTotal(_PyThreadState_GET(), extra); -#endif - - _Py_atomic_store_uint32_relaxed(&op->ob_ref_local, 0); - _Py_atomic_store_uintptr_relaxed(&op->ob_tid, 0); return refcnt; } #endif /* Py_GIL_DISABLED */ @@ -2359,7 +2362,7 @@ _PyTypes_FiniTypes(PyInterpreterState *interp) // their base classes. for (Py_ssize_t i=Py_ARRAY_LENGTH(static_types)-1; i>=0; i--) { PyTypeObject *type = static_types[i]; - _PyStaticType_Dealloc(interp, type); + _PyStaticType_FiniBuiltin(interp, type); } } @@ -2431,14 +2434,14 @@ _PyObject_SetDeferredRefcount(PyObject *op) assert(PyType_IS_GC(Py_TYPE(op))); assert(_Py_IsOwnedByCurrentThread(op)); assert(op->ob_ref_shared == 0); + _PyObject_SET_GC_BITS(op, _PyGC_BITS_DEFERRED); PyInterpreterState *interp = _PyInterpreterState_GET(); - if (interp->gc.immortalize.enabled) { + if (_Py_atomic_load_int_relaxed(&interp->gc.immortalize) == 1) { // gh-117696: immortalize objects instead of using deferred reference // counting for now. 
_Py_SetImmortal(op); return; } - op->ob_gc_bits |= _PyGC_BITS_DEFERRED; op->ob_ref_local += 1; op->ob_ref_shared = _Py_REF_QUEUED; #endif diff --git a/Objects/structseq.c b/Objects/structseq.c index ec5c5ab45ba813..d8289f2638db0f 100644 --- a/Objects/structseq.c +++ b/Objects/structseq.c @@ -718,7 +718,7 @@ _PyStructSequence_FiniBuiltin(PyInterpreterState *interp, PyTypeObject *type) return; } - _PyStaticType_Dealloc(interp, type); + _PyStaticType_FiniBuiltin(interp, type); if (_Py_IsMainInterpreter(interp)) { // Undo _PyStructSequence_InitBuiltinWithFlags(). diff --git a/Objects/typeobject.c b/Objects/typeobject.c index 4b144fab5de8f1..18b7dfce3d8038 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -131,93 +131,159 @@ type_from_ref(PyObject *ref) #ifndef NDEBUG static inline int -static_builtin_index_is_set(PyTypeObject *self) +managed_static_type_index_is_set(PyTypeObject *self) { return self->tp_subclasses != NULL; } #endif static inline size_t -static_builtin_index_get(PyTypeObject *self) +managed_static_type_index_get(PyTypeObject *self) { - assert(static_builtin_index_is_set(self)); + assert(managed_static_type_index_is_set(self)); /* We store a 1-based index so 0 can mean "not initialized". */ return (size_t)self->tp_subclasses - 1; } static inline void -static_builtin_index_set(PyTypeObject *self, size_t index) +managed_static_type_index_set(PyTypeObject *self, size_t index) { - assert(index < _Py_MAX_STATIC_BUILTIN_TYPES); + assert(index < _Py_MAX_MANAGED_STATIC_BUILTIN_TYPES); /* We store a 1-based index so 0 can mean "not initialized". */ self->tp_subclasses = (PyObject *)(index + 1); } static inline void -static_builtin_index_clear(PyTypeObject *self) +managed_static_type_index_clear(PyTypeObject *self) { self->tp_subclasses = NULL; } -static inline static_builtin_state * +static inline managed_static_type_state * static_builtin_state_get(PyInterpreterState *interp, PyTypeObject *self) { - return &(interp->types.builtins[static_builtin_index_get(self)]); + return &(interp->types.builtins.initialized[ + managed_static_type_index_get(self)]); +} + +static inline managed_static_type_state * +static_ext_type_state_get(PyInterpreterState *interp, PyTypeObject *self) +{ + return &(interp->types.for_extensions.initialized[ + managed_static_type_index_get(self)]); +} + +static managed_static_type_state * +managed_static_type_state_get(PyInterpreterState *interp, PyTypeObject *self) +{ + // It's probably a builtin type. + size_t index = managed_static_type_index_get(self); + managed_static_type_state *state = + &(interp->types.builtins.initialized[index]); + if (state->type == self) { + return state; + } + if (index > _Py_MAX_MANAGED_STATIC_EXT_TYPES) { + return state; + } + return &(interp->types.for_extensions.initialized[index]); } /* For static types we store some state in an array on each interpreter. */ -static_builtin_state * +managed_static_type_state * _PyStaticType_GetState(PyInterpreterState *interp, PyTypeObject *self) { assert(self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN); - return static_builtin_state_get(interp, self); + return managed_static_type_state_get(interp, self); } /* Set the type's per-interpreter state. 
*/ static void -static_builtin_state_init(PyInterpreterState *interp, PyTypeObject *self) +managed_static_type_state_init(PyInterpreterState *interp, PyTypeObject *self, + int isbuiltin, int initial) { - if (_Py_IsMainInterpreter(interp)) { - assert(!static_builtin_index_is_set(self)); - static_builtin_index_set(self, interp->types.num_builtins_initialized); + size_t index; + if (initial) { + assert(!managed_static_type_index_is_set(self)); + if (isbuiltin) { + index = interp->types.builtins.num_initialized; + assert(index < _Py_MAX_MANAGED_STATIC_BUILTIN_TYPES); + } + else { + PyMutex_Lock(&interp->types.mutex); + index = interp->types.for_extensions.next_index; + interp->types.for_extensions.next_index++; + PyMutex_Unlock(&interp->types.mutex); + assert(index < _Py_MAX_MANAGED_STATIC_EXT_TYPES); + } + managed_static_type_index_set(self, index); } else { - assert(static_builtin_index_get(self) == - interp->types.num_builtins_initialized); + index = managed_static_type_index_get(self); + if (isbuiltin) { + assert(index == interp->types.builtins.num_initialized); + assert(index < _Py_MAX_MANAGED_STATIC_BUILTIN_TYPES); + } + else { + assert(index < _Py_MAX_MANAGED_STATIC_EXT_TYPES); + } } - static_builtin_state *state = static_builtin_state_get(interp, self); - /* It should only be called once for each builtin type. */ + managed_static_type_state *state = isbuiltin + ? &(interp->types.builtins.initialized[index]) + : &(interp->types.for_extensions.initialized[index]); + + /* It should only be called once for each builtin type per interpreter. */ assert(state->type == NULL); state->type = self; + state->isbuiltin = isbuiltin; /* state->tp_subclasses is left NULL until init_subclasses() sets it. */ /* state->tp_weaklist is left NULL until insert_head() or insert_after() (in weakrefobject.c) sets it. */ - interp->types.num_builtins_initialized++; + if (isbuiltin) { + interp->types.builtins.num_initialized++; + } + else { + interp->types.for_extensions.num_initialized++; + } } /* Reset the type's per-interpreter state. - This basically undoes what static_builtin_state_init() did. */ + This basically undoes what managed_static_type_state_init() did. */ static void -static_builtin_state_clear(PyInterpreterState *interp, PyTypeObject *self) +managed_static_type_state_clear(PyInterpreterState *interp, PyTypeObject *self, + int isbuiltin, int final) { - static_builtin_state *state = static_builtin_state_get(interp, self); + managed_static_type_state *state = isbuiltin + ? static_builtin_state_get(interp, self) + : static_ext_type_state_get(interp, self); assert(state->type != NULL); state->type = NULL; assert(state->tp_weaklist == NULL); // It was already cleared out. - if (_Py_IsMainInterpreter(interp)) { - static_builtin_index_clear(self); + if (final) { + managed_static_type_index_clear(self); } - assert(interp->types.num_builtins_initialized > 0); - interp->types.num_builtins_initialized--; + if (isbuiltin) { + assert(interp->types.builtins.num_initialized > 0); + interp->types.builtins.num_initialized--; + } + else { + PyMutex_Lock(&interp->types.mutex); + assert(interp->types.for_extensions.num_initialized > 0); + interp->types.for_extensions.num_initialized--; + if (interp->types.for_extensions.num_initialized == 0) { + interp->types.for_extensions.next_index = 0; + } + PyMutex_Unlock(&interp->types.mutex); + } } -// Also see _PyStaticType_InitBuiltin() and _PyStaticType_Dealloc(). +// Also see _PyStaticType_InitBuiltin() and _PyStaticType_FiniBuiltin(). 
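/* Editorial aside -- not part of the patch.  As the comments above note,
   these helpers keep a 1-based index in the pointer-sized tp_subclasses
   slot so that NULL (0) can still mean "not initialized".  A self-contained
   sketch of that encoding, using a hypothetical stand-alone slot instead of
   a PyTypeObject: */

#include <assert.h>
#include <stddef.h>

static void *slot = NULL;                   /* stands in for tp_subclasses */

static void
idx_set(size_t index)
{
    slot = (void *)(index + 1);             /* store 1-based */
}

static int
idx_is_set(void)
{
    return slot != NULL;
}

static size_t
idx_get(void)
{
    assert(idx_is_set());
    return (size_t)slot - 1;                /* recover the 0-based index */
}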
/* end static builtin helpers */ @@ -227,7 +293,7 @@ start_readying(PyTypeObject *type) { if (type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { PyInterpreterState *interp = _PyInterpreterState_GET(); - static_builtin_state *state = static_builtin_state_get(interp, type); + managed_static_type_state *state = managed_static_type_state_get(interp, type); assert(state != NULL); assert(!state->readying); state->readying = 1; @@ -242,7 +308,7 @@ stop_readying(PyTypeObject *type) { if (type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { PyInterpreterState *interp = _PyInterpreterState_GET(); - static_builtin_state *state = static_builtin_state_get(interp, type); + managed_static_type_state *state = managed_static_type_state_get(interp, type); assert(state != NULL); assert(state->readying); state->readying = 0; @@ -257,7 +323,7 @@ is_readying(PyTypeObject *type) { if (type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { PyInterpreterState *interp = _PyInterpreterState_GET(); - static_builtin_state *state = static_builtin_state_get(interp, type); + managed_static_type_state *state = managed_static_type_state_get(interp, type); assert(state != NULL); return state->readying; } @@ -272,7 +338,7 @@ lookup_tp_dict(PyTypeObject *self) { if (self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { PyInterpreterState *interp = _PyInterpreterState_GET(); - static_builtin_state *state = _PyStaticType_GetState(interp, self); + managed_static_type_state *state = _PyStaticType_GetState(interp, self); assert(state != NULL); return state->tp_dict; } @@ -298,7 +364,7 @@ set_tp_dict(PyTypeObject *self, PyObject *dict) { if (self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { PyInterpreterState *interp = _PyInterpreterState_GET(); - static_builtin_state *state = _PyStaticType_GetState(interp, self); + managed_static_type_state *state = _PyStaticType_GetState(interp, self); assert(state != NULL); state->tp_dict = dict; return; @@ -311,7 +377,7 @@ clear_tp_dict(PyTypeObject *self) { if (self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { PyInterpreterState *interp = _PyInterpreterState_GET(); - static_builtin_state *state = _PyStaticType_GetState(interp, self); + managed_static_type_state *state = _PyStaticType_GetState(interp, self); assert(state != NULL); Py_CLEAR(state->tp_dict); return; @@ -340,13 +406,13 @@ _PyType_GetBases(PyTypeObject *self) } static inline void -set_tp_bases(PyTypeObject *self, PyObject *bases) +set_tp_bases(PyTypeObject *self, PyObject *bases, int initial) { assert(PyTuple_CheckExact(bases)); if (self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { // XXX tp_bases can probably be statically allocated for each // static builtin type. 
- assert(_Py_IsMainInterpreter(_PyInterpreterState_GET())); + assert(initial); assert(self->tp_bases == NULL); if (PyTuple_GET_SIZE(bases) == 0) { assert(self->tp_base == NULL); @@ -363,10 +429,10 @@ set_tp_bases(PyTypeObject *self, PyObject *bases) } static inline void -clear_tp_bases(PyTypeObject *self) +clear_tp_bases(PyTypeObject *self, int final) { if (self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { - if (_Py_IsMainInterpreter(_PyInterpreterState_GET())) { + if (final) { if (self->tp_bases != NULL) { if (PyTuple_GET_SIZE(self->tp_bases) == 0) { Py_CLEAR(self->tp_bases); @@ -413,13 +479,13 @@ _PyType_GetMRO(PyTypeObject *self) } static inline void -set_tp_mro(PyTypeObject *self, PyObject *mro) +set_tp_mro(PyTypeObject *self, PyObject *mro, int initial) { assert(PyTuple_CheckExact(mro)); if (self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { // XXX tp_mro can probably be statically allocated for each // static builtin type. - assert(_Py_IsMainInterpreter(_PyInterpreterState_GET())); + assert(initial); assert(self->tp_mro == NULL); /* Other checks are done via set_tp_bases. */ _Py_SetImmortal(mro); @@ -428,10 +494,10 @@ set_tp_mro(PyTypeObject *self, PyObject *mro) } static inline void -clear_tp_mro(PyTypeObject *self) +clear_tp_mro(PyTypeObject *self, int final) { if (self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { - if (_Py_IsMainInterpreter(_PyInterpreterState_GET())) { + if (final) { if (self->tp_mro != NULL) { if (PyTuple_GET_SIZE(self->tp_mro) == 0) { Py_CLEAR(self->tp_mro); @@ -457,7 +523,7 @@ init_tp_subclasses(PyTypeObject *self) } if (self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { PyInterpreterState *interp = _PyInterpreterState_GET(); - static_builtin_state *state = _PyStaticType_GetState(interp, self); + managed_static_type_state *state = _PyStaticType_GetState(interp, self); state->tp_subclasses = subclasses; return subclasses; } @@ -473,7 +539,7 @@ clear_tp_subclasses(PyTypeObject *self) has no subclass. */ if (self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { PyInterpreterState *interp = _PyInterpreterState_GET(); - static_builtin_state *state = _PyStaticType_GetState(interp, self); + managed_static_type_state *state = _PyStaticType_GetState(interp, self); Py_CLEAR(state->tp_subclasses); return; } @@ -485,7 +551,7 @@ lookup_tp_subclasses(PyTypeObject *self) { if (self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { PyInterpreterState *interp = _PyInterpreterState_GET(); - static_builtin_state *state = _PyStaticType_GetState(interp, self); + managed_static_type_state *state = _PyStaticType_GetState(interp, self); assert(state != NULL); return state->tp_subclasses; } @@ -774,10 +840,10 @@ _PyTypes_Fini(PyInterpreterState *interp) struct type_cache *cache = &interp->types.type_cache; type_cache_clear(cache, NULL); - assert(interp->types.num_builtins_initialized == 0); + assert(interp->types.builtins.num_initialized == 0); // All the static builtin types should have been finalized already. 
- for (size_t i = 0; i < _Py_MAX_STATIC_BUILTIN_TYPES; i++) { - assert(interp->types.builtins[i].type == NULL); + for (size_t i = 0; i < _Py_MAX_MANAGED_STATIC_BUILTIN_TYPES; i++) { + assert(interp->types.builtins.initialized[i].type == NULL); } } @@ -1444,7 +1510,7 @@ mro_hierarchy(PyTypeObject *type, PyObject *temp) Py_XDECREF(tuple); if (res < 0) { - set_tp_mro(type, old_mro); + set_tp_mro(type, old_mro, 0); Py_DECREF(new_mro); return -1; } @@ -1545,7 +1611,7 @@ type_set_bases_unlocked(PyTypeObject *type, PyObject *new_bases, void *context) assert(old_bases != NULL); PyTypeObject *old_base = type->tp_base; - set_tp_bases(type, Py_NewRef(new_bases)); + set_tp_bases(type, Py_NewRef(new_bases), 0); type->tp_base = (PyTypeObject *)Py_NewRef(new_base); PyObject *temp = PyList_New(0); @@ -1593,7 +1659,7 @@ type_set_bases_unlocked(PyTypeObject *type, PyObject *new_bases, void *context) "", 2, 3, &cls, &new_mro, &old_mro); /* Do not rollback if cls has a newer version of MRO. */ if (lookup_tp_mro(cls) == new_mro) { - set_tp_mro(cls, Py_XNewRef(old_mro)); + set_tp_mro(cls, Py_XNewRef(old_mro), 0); Py_DECREF(new_mro); } } @@ -1603,7 +1669,7 @@ type_set_bases_unlocked(PyTypeObject *type, PyObject *new_bases, void *context) if (lookup_tp_bases(type) == new_bases) { assert(type->tp_base == new_base); - set_tp_bases(type, old_bases); + set_tp_bases(type, old_bases, 0); type->tp_base = old_base; Py_DECREF(new_bases); @@ -1742,6 +1808,10 @@ type_set_annotations(PyTypeObject *type, PyObject *value, void *context) static PyObject * type_get_type_params(PyTypeObject *type, void *context) { + if (type == &PyType_Type) { + return PyTuple_New(0); + } + PyObject *params; if (PyDict_GetItemRef(lookup_tp_dict(type), &_Py_ID(__type_params__), ¶ms) == 0) { return PyTuple_New(0); @@ -2973,7 +3043,7 @@ mro_invoke(PyTypeObject *type) - Returns -1 in case of an error. 
*/ static int -mro_internal_unlocked(PyTypeObject *type, PyObject **p_old_mro) +mro_internal_unlocked(PyTypeObject *type, int initial, PyObject **p_old_mro) { ASSERT_TYPE_LOCK_HELD(); @@ -2996,7 +3066,7 @@ mro_internal_unlocked(PyTypeObject *type, PyObject **p_old_mro) return 0; } - set_tp_mro(type, new_mro); + set_tp_mro(type, new_mro, initial); type_mro_modified(type, new_mro); /* corner case: the super class might have been hidden @@ -3026,7 +3096,7 @@ mro_internal(PyTypeObject *type, PyObject **p_old_mro) { int res; BEGIN_TYPE_LOCK() - res = mro_internal_unlocked(type, p_old_mro); + res = mro_internal_unlocked(type, 0, p_old_mro); END_TYPE_LOCK() return res; } @@ -3621,7 +3691,7 @@ type_new_alloc(type_new_ctx *ctx) type->tp_as_mapping = &et->as_mapping; type->tp_as_buffer = &et->as_buffer; - set_tp_bases(type, Py_NewRef(ctx->bases)); + set_tp_bases(type, Py_NewRef(ctx->bases), 1); type->tp_base = (PyTypeObject *)Py_NewRef(ctx->base); type->tp_dealloc = subtype_dealloc; @@ -4615,7 +4685,7 @@ _PyType_FromMetaclass_impl( /* Set slots we have prepared */ type->tp_base = (PyTypeObject *)Py_NewRef(base); - set_tp_bases(type, bases); + set_tp_bases(type, bases, 1); bases = NULL; // We give our reference to bases to the type type->tp_doc = tp_doc; @@ -5058,7 +5128,7 @@ is_dunder_name(PyObject *name) return 0; } -static void +static PyObject * update_cache(struct type_cache_entry *entry, PyObject *name, unsigned int version_tag, PyObject *value) { _Py_atomic_store_uint32_relaxed(&entry->version, version_tag); @@ -5069,7 +5139,7 @@ update_cache(struct type_cache_entry *entry, PyObject *name, unsigned int versio // exact unicode object or Py_None so it's safe to do so. PyObject *old_name = entry->name; _Py_atomic_store_ptr_relaxed(&entry->name, Py_NewRef(name)); - Py_DECREF(old_name); + return old_name; } #if Py_GIL_DISABLED @@ -5089,10 +5159,12 @@ update_cache_gil_disabled(struct type_cache_entry *entry, PyObject *name, return; } - update_cache(entry, name, version_tag, value); + PyObject *old_value = update_cache(entry, name, version_tag, value); // Then update sequence to the next valid value _PySeqLock_UnlockWrite(&entry->sequence); + + Py_DECREF(old_value); } #endif @@ -5204,7 +5276,8 @@ _PyType_LookupRef(PyTypeObject *type, PyObject *name) #if Py_GIL_DISABLED update_cache_gil_disabled(entry, name, version, res); #else - update_cache(entry, name, version, res); + PyObject *old_value = update_cache(entry, name, version, res); + Py_DECREF(old_value); #endif } return res; @@ -5517,7 +5590,7 @@ type_dealloc_common(PyTypeObject *type) static void -clear_static_tp_subclasses(PyTypeObject *type) +clear_static_tp_subclasses(PyTypeObject *type, int isbuiltin) { PyObject *subclasses = lookup_tp_subclasses(type); if (subclasses == NULL) { @@ -5554,47 +5627,64 @@ clear_static_tp_subclasses(PyTypeObject *type) continue; } // All static builtin subtypes should have been finalized already. 
- assert(!(subclass->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN)); + assert(!isbuiltin || !(subclass->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN)); Py_DECREF(subclass); } +#else + (void)isbuiltin; #endif clear_tp_subclasses(type); } static void -clear_static_type_objects(PyInterpreterState *interp, PyTypeObject *type) +clear_static_type_objects(PyInterpreterState *interp, PyTypeObject *type, + int isbuiltin, int final) { - if (_Py_IsMainInterpreter(interp)) { + if (final) { Py_CLEAR(type->tp_cache); } clear_tp_dict(type); - clear_tp_bases(type); - clear_tp_mro(type); - clear_static_tp_subclasses(type); + clear_tp_bases(type, final); + clear_tp_mro(type, final); + clear_static_tp_subclasses(type, isbuiltin); } -void -_PyStaticType_Dealloc(PyInterpreterState *interp, PyTypeObject *type) + +static void +fini_static_type(PyInterpreterState *interp, PyTypeObject *type, + int isbuiltin, int final) { assert(type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN); assert(_Py_IsImmortal((PyObject *)type)); type_dealloc_common(type); - clear_static_type_objects(interp, type); + clear_static_type_objects(interp, type, isbuiltin, final); - if (_Py_IsMainInterpreter(interp)) { + if (final) { type->tp_flags &= ~Py_TPFLAGS_READY; type->tp_flags &= ~Py_TPFLAGS_VALID_VERSION_TAG; type->tp_version_tag = 0; } _PyStaticType_ClearWeakRefs(interp, type); - static_builtin_state_clear(interp, type); + managed_static_type_state_clear(interp, type, isbuiltin, final); /* We leave _Py_TPFLAGS_STATIC_BUILTIN set on tp_flags. */ } +void +_PyStaticType_FiniForExtension(PyInterpreterState *interp, PyTypeObject *type, int final) +{ + fini_static_type(interp, type, 0, final); +} + +void +_PyStaticType_FiniBuiltin(PyInterpreterState *interp, PyTypeObject *type) +{ + fini_static_type(interp, type, 1, _Py_IsMainInterpreter(interp)); +} + static void type_dealloc(PyObject *self) @@ -6036,15 +6126,19 @@ object_new(PyTypeObject *type, PyObject *args, PyObject *kwds) return NULL; } comma_w_quotes_sep = PyUnicode_FromString("', '"); + if (!comma_w_quotes_sep) { + Py_DECREF(sorted_methods); + return NULL; + } joined = PyUnicode_Join(comma_w_quotes_sep, sorted_methods); - method_count = PyObject_Length(sorted_methods); - Py_DECREF(sorted_methods); + Py_DECREF(comma_w_quotes_sep); if (joined == NULL) { - Py_DECREF(comma_w_quotes_sep); + Py_DECREF(sorted_methods); return NULL; } + method_count = PyObject_Length(sorted_methods); + Py_DECREF(sorted_methods); if (method_count == -1) { - Py_DECREF(comma_w_quotes_sep); Py_DECREF(joined); return NULL; } @@ -6056,7 +6150,6 @@ object_new(PyTypeObject *type, PyObject *args, PyObject *kwds) method_count > 1 ? "s" : "", joined); Py_DECREF(joined); - Py_DECREF(comma_w_quotes_sep); return NULL; } PyObject *obj = type->tp_alloc(type, 0); @@ -7034,8 +7127,11 @@ object___sizeof___impl(PyObject *self) res = 0; isize = Py_TYPE(self)->tp_itemsize; - if (isize > 0) - res = Py_SIZE(self) * isize; + if (isize > 0) { + /* This assumes that ob_size is valid if tp_itemsize is not 0, + which isn't true for PyLongObject. 
*/ + res = _PyVarObject_CAST(self)->ob_size * isize; + } res += Py_TYPE(self)->tp_basicsize; return PyLong_FromSsize_t(res); @@ -7642,10 +7738,10 @@ type_ready_set_type(PyTypeObject *type) } static int -type_ready_set_bases(PyTypeObject *type) +type_ready_set_bases(PyTypeObject *type, int initial) { if (type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { - if (!_Py_IsMainInterpreter(_PyInterpreterState_GET())) { + if (!initial) { assert(lookup_tp_bases(type) != NULL); return 0; } @@ -7664,7 +7760,7 @@ type_ready_set_bases(PyTypeObject *type) if (bases == NULL) { return -1; } - set_tp_bases(type, bases); + set_tp_bases(type, bases, 1); } return 0; } @@ -7774,12 +7870,12 @@ type_ready_preheader(PyTypeObject *type) } static int -type_ready_mro(PyTypeObject *type) +type_ready_mro(PyTypeObject *type, int initial) { ASSERT_TYPE_LOCK_HELD(); if (type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { - if (!_Py_IsMainInterpreter(_PyInterpreterState_GET())) { + if (!initial) { assert(lookup_tp_mro(type) != NULL); return 0; } @@ -7787,7 +7883,7 @@ type_ready_mro(PyTypeObject *type) } /* Calculate method resolution order */ - if (mro_internal_unlocked(type, NULL) < 0) { + if (mro_internal_unlocked(type, initial, NULL) < 0) { return -1; } PyObject *mro = lookup_tp_mro(type); @@ -7942,7 +8038,7 @@ type_ready_add_subclasses(PyTypeObject *type) // Set tp_new and the "__new__" key in the type dictionary. // Use the Py_TPFLAGS_DISALLOW_INSTANTIATION flag. static int -type_ready_set_new(PyTypeObject *type, int rerunbuiltin) +type_ready_set_new(PyTypeObject *type, int initial) { PyTypeObject *base = type->tp_base; /* The condition below could use some explanation. @@ -7964,7 +8060,7 @@ type_ready_set_new(PyTypeObject *type, int rerunbuiltin) if (!(type->tp_flags & Py_TPFLAGS_DISALLOW_INSTANTIATION)) { if (type->tp_new != NULL) { - if (!rerunbuiltin || base == NULL || type->tp_new != base->tp_new) { + if (initial || base == NULL || type->tp_new != base->tp_new) { // If "__new__" key does not exists in the type dictionary, // set it to tp_new_wrapper(). 
if (add_tp_new_wrapper(type) < 0) { @@ -8046,7 +8142,7 @@ type_ready_post_checks(PyTypeObject *type) static int -type_ready(PyTypeObject *type, int rerunbuiltin) +type_ready(PyTypeObject *type, int initial) { ASSERT_TYPE_LOCK_HELD(); @@ -8076,19 +8172,19 @@ type_ready(PyTypeObject *type, int rerunbuiltin) if (type_ready_set_type(type) < 0) { goto error; } - if (type_ready_set_bases(type) < 0) { + if (type_ready_set_bases(type, initial) < 0) { goto error; } - if (type_ready_mro(type) < 0) { + if (type_ready_mro(type, initial) < 0) { goto error; } - if (type_ready_set_new(type, rerunbuiltin) < 0) { + if (type_ready_set_new(type, initial) < 0) { goto error; } if (type_ready_fill_dict(type) < 0) { goto error; } - if (!rerunbuiltin) { + if (initial) { if (type_ready_inherit(type) < 0) { goto error; } @@ -8102,7 +8198,7 @@ type_ready(PyTypeObject *type, int rerunbuiltin) if (type_ready_add_subclasses(type) < 0) { goto error; } - if (!rerunbuiltin) { + if (initial) { if (type_ready_managed_dict(type) < 0) { goto error; } @@ -8142,7 +8238,7 @@ PyType_Ready(PyTypeObject *type) int res; BEGIN_TYPE_LOCK() if (!(type->tp_flags & Py_TPFLAGS_READY)) { - res = type_ready(type, 0); + res = type_ready(type, 1); } else { res = 0; assert(_PyType_CheckConsistency(type)); @@ -8151,17 +8247,18 @@ PyType_Ready(PyTypeObject *type) return res; } -int -_PyStaticType_InitBuiltin(PyInterpreterState *interp, PyTypeObject *self) + +static int +init_static_type(PyInterpreterState *interp, PyTypeObject *self, + int isbuiltin, int initial) { assert(_Py_IsImmortal((PyObject *)self)); assert(!(self->tp_flags & Py_TPFLAGS_HEAPTYPE)); assert(!(self->tp_flags & Py_TPFLAGS_MANAGED_DICT)); assert(!(self->tp_flags & Py_TPFLAGS_MANAGED_WEAKREF)); - int ismain = _Py_IsMainInterpreter(interp); if ((self->tp_flags & Py_TPFLAGS_READY) == 0) { - assert(ismain); + assert(initial); self->tp_flags |= _Py_TPFLAGS_STATIC_BUILTIN; self->tp_flags |= Py_TPFLAGS_IMMUTABLETYPE; @@ -8171,24 +8268,36 @@ _PyStaticType_InitBuiltin(PyInterpreterState *interp, PyTypeObject *self) self->tp_flags |= Py_TPFLAGS_VALID_VERSION_TAG; } else { - assert(!ismain); + assert(!initial); assert(self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN); assert(self->tp_flags & Py_TPFLAGS_VALID_VERSION_TAG); } - static_builtin_state_init(interp, self); + managed_static_type_state_init(interp, self, isbuiltin, initial); int res; BEGIN_TYPE_LOCK(); - res = type_ready(self, !ismain); + res = type_ready(self, initial); END_TYPE_LOCK() if (res < 0) { _PyStaticType_ClearWeakRefs(interp, self); - static_builtin_state_clear(interp, self); + managed_static_type_state_clear(interp, self, isbuiltin, initial); } return res; } +int +_PyStaticType_InitForExtension(PyInterpreterState *interp, PyTypeObject *self) +{ + return init_static_type(interp, self, 0, ((self->tp_flags & Py_TPFLAGS_READY) == 0)); +} + +int +_PyStaticType_InitBuiltin(PyInterpreterState *interp, PyTypeObject *self) +{ + return init_static_type(interp, self, 1, _Py_IsMainInterpreter(interp)); +} + static int add_subclass(PyTypeObject *base, PyTypeObject *type) diff --git a/Objects/typevarobject.c b/Objects/typevarobject.c index cc916045266aea..c8ab14053de418 100644 --- a/Objects/typevarobject.c +++ b/Objects/typevarobject.c @@ -73,7 +73,7 @@ NoDefault_reduce(PyObject *op, PyObject *Py_UNUSED(ignored)) return PyUnicode_FromString("NoDefault"); } -static PyMethodDef notimplemented_methods[] = { +static PyMethodDef nodefault_methods[] = { {"__reduce__", NoDefault_reduce, METH_NOARGS, NULL}, {NULL, NULL} }; @@ -98,7 +98,7 @@ 
nodefault_dealloc(PyObject *nodefault) _Py_SetImmortal(nodefault); } -PyDoc_STRVAR(notimplemented_doc, +PyDoc_STRVAR(nodefault_doc, "NoDefaultType()\n" "--\n\n" "The type of the NoDefault singleton."); @@ -109,8 +109,8 @@ PyTypeObject _PyNoDefault_Type = { .tp_dealloc = nodefault_dealloc, .tp_repr = NoDefault_repr, .tp_flags = Py_TPFLAGS_DEFAULT, - .tp_doc = notimplemented_doc, - .tp_methods = notimplemented_methods, + .tp_doc = nodefault_doc, + .tp_methods = nodefault_methods, .tp_new = nodefault_new, }; diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 057b417074ebea..59ca344afe7f3e 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -44,6 +44,7 @@ OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. #include "pycore_bytesobject.h" // _PyBytes_Repeat() #include "pycore_ceval.h" // _PyEval_GetBuiltin() #include "pycore_codecs.h" // _PyCodec_Lookup() +#include "pycore_critical_section.h" // Py_*_CRITICAL_SECTION_SEQUENCE_FAST #include "pycore_format.h" // F_LJUST #include "pycore_initconfig.h" // _PyStatus_OK() #include "pycore_interp.h" // PyInterpreterState.fs_codec @@ -1624,7 +1625,7 @@ unicode_modifiable(PyObject *unicode) assert(_PyUnicode_CHECK(unicode)); if (Py_REFCNT(unicode) != 1) return 0; - if (_PyUnicode_HASH(unicode) != -1) + if (FT_ATOMIC_LOAD_SSIZE_RELAXED(_PyUnicode_HASH(unicode)) != -1) return 0; if (PyUnicode_CHECK_INTERNED(unicode)) return 0; @@ -9559,13 +9560,14 @@ PyUnicode_Join(PyObject *separator, PyObject *seq) return NULL; } - /* NOTE: the following code can't call back into Python code, - * so we are sure that fseq won't be mutated. - */ + Py_BEGIN_CRITICAL_SECTION_SEQUENCE_FAST(seq); items = PySequence_Fast_ITEMS(fseq); seqlen = PySequence_Fast_GET_SIZE(fseq); res = _PyUnicode_JoinArray(separator, items, seqlen); + + Py_END_CRITICAL_SECTION_SEQUENCE_FAST(); + Py_DECREF(fseq); return res; } @@ -10817,9 +10819,10 @@ _PyUnicode_EqualToASCIIId(PyObject *left, _Py_Identifier *right) if (PyUnicode_CHECK_INTERNED(left)) return 0; - assert(_PyUnicode_HASH(right_uni) != -1); - Py_hash_t hash = _PyUnicode_HASH(left); - if (hash != -1 && hash != _PyUnicode_HASH(right_uni)) { + Py_hash_t right_hash = FT_ATOMIC_LOAD_SSIZE_RELAXED(_PyUnicode_HASH(right_uni)); + assert(right_hash != -1); + Py_hash_t hash = FT_ATOMIC_LOAD_SSIZE_RELAXED(_PyUnicode_HASH(left)); + if (hash != -1 && hash != right_hash) { return 0; } @@ -11304,12 +11307,14 @@ unicode_hash(PyObject *self) #ifdef Py_DEBUG assert(_Py_HashSecret_Initialized); #endif - if (_PyUnicode_HASH(self) != -1) - return _PyUnicode_HASH(self); - + Py_hash_t hash = FT_ATOMIC_LOAD_SSIZE_RELAXED(_PyUnicode_HASH(self)); + if (hash != -1) { + return hash; + } x = _Py_HashBytes(PyUnicode_DATA(self), PyUnicode_GET_LENGTH(self) * PyUnicode_KIND(self)); - _PyUnicode_HASH(self) = x; + + FT_ATOMIC_STORE_SSIZE_RELAXED(_PyUnicode_HASH(self), x); return x; } @@ -13409,7 +13414,7 @@ Return a formatted version of the string, using substitutions from args and kwar The substitutions are identified by braces ('{' and '}')."); PyDoc_STRVAR(format_map__doc__, - "format_map($self, /, mapping)\n\ + "format_map($self, mapping, /)\n\ --\n\ \n\ Return a formatted version of the string, using substitutions from mapping.\n\ @@ -15505,9 +15510,9 @@ unicode_is_finalizing(void) void _PyUnicode_FiniTypes(PyInterpreterState *interp) { - _PyStaticType_Dealloc(interp, &EncodingMapType); - _PyStaticType_Dealloc(interp, &PyFieldNameIter_Type); - _PyStaticType_Dealloc(interp, &PyFormatterIter_Type); + 
_PyStaticType_FiniBuiltin(interp, &EncodingMapType); + _PyStaticType_FiniBuiltin(interp, &PyFieldNameIter_Type); + _PyStaticType_FiniBuiltin(interp, &PyFormatterIter_Type); } diff --git a/Objects/weakrefobject.c b/Objects/weakrefobject.c index 88afaec86827ed..3b027e1b518ba6 100644 --- a/Objects/weakrefobject.c +++ b/Objects/weakrefobject.c @@ -1066,7 +1066,7 @@ PyObject_ClearWeakRefs(PyObject *object) void _PyStaticType_ClearWeakRefs(PyInterpreterState *interp, PyTypeObject *type) { - static_builtin_state *state = _PyStaticType_GetState(interp, type); + managed_static_type_state *state = _PyStaticType_GetState(interp, type); PyObject **list = _PyStaticType_GET_WEAKREFS_LISTPTR(state); // This is safe to do without holding the lock in free-threaded builds; // there is only one thread running and no new threads can be created. diff --git a/PC/launcher2.c b/PC/launcher2.c index 139aa61bbe5cc2..b372044e353202 100644 --- a/PC/launcher2.c +++ b/PC/launcher2.c @@ -853,7 +853,7 @@ searchPath(SearchInfo *search, const wchar_t *shebang, int shebangLength) } wchar_t filename[MAXLEN]; - if (wcsncpy_s(filename, MAXLEN, command, lastDot)) { + if (wcsncpy_s(filename, MAXLEN, command, commandLength)) { return RC_BAD_VIRTUAL_PATH; } @@ -2707,6 +2707,11 @@ process(int argc, wchar_t ** argv) DWORD len = GetEnvironmentVariableW(L"PYLAUNCHER_LIMIT_TO_COMPANY", NULL, 0); if (len > 1) { wchar_t *limitToCompany = allocSearchInfoBuffer(&search, len); + if (!limitToCompany) { + exitCode = RC_NO_MEMORY; + winerror(0, L"Failed to allocate internal buffer"); + goto abort; + } search.limitToCompany = limitToCompany; if (0 == GetEnvironmentVariableW(L"PYLAUNCHER_LIMIT_TO_COMPANY", limitToCompany, len)) { exitCode = RC_INTERNAL_ERROR; diff --git a/PC/layout/main.py b/PC/layout/main.py index d176b272f1c19d..0350ed7af3f9b5 100644 --- a/PC/layout/main.py +++ b/PC/layout/main.py @@ -121,7 +121,7 @@ def get_tcltk_lib(ns): def get_layout(ns): - def in_build(f, dest="", new_name=None): + def in_build(f, dest="", new_name=None, no_lib=False): n, _, x = f.rpartition(".") n = new_name or n src = ns.build / f @@ -136,7 +136,7 @@ def in_build(f, dest="", new_name=None): pdb = src.with_suffix(".pdb") if pdb.is_file(): yield dest + n + ".pdb", pdb - if ns.include_dev: + if ns.include_dev and not no_lib: lib = src.with_suffix(".lib") if lib.is_file(): yield "libs/" + n + ".lib", lib @@ -202,7 +202,9 @@ def in_build(f, dest="", new_name=None): yield "LICENSE.txt", ns.build / "LICENSE.txt" - for dest, src in rglob(ns.build, "*.pyd"): + dest = "" if ns.flat_dlls else "DLLs/" + + for _, src in rglob(ns.build, "*.pyd"): if ns.include_freethreaded: if not src.match("*.cp*t-win*.pyd"): continue @@ -217,14 +219,14 @@ def in_build(f, dest="", new_name=None): continue if src in TCLTK_PYDS_ONLY and not ns.include_tcltk: continue - yield from in_build(src.name, dest="" if ns.flat_dlls else "DLLs/") + yield from in_build(src.name, dest=dest, no_lib=True) - for dest, src in rglob(ns.build, "*.dll"): + for _, src in rglob(ns.build, "*.dll"): if src.stem.endswith("_d") != bool(ns.debug) and src not in REQUIRED_DLLS: continue if src in EXCLUDE_FROM_DLLS: continue - yield from in_build(src.name, dest="" if ns.flat_dlls else "DLLs/") + yield from in_build(src.name, dest=dest, no_lib=True) if ns.zip_lib: zip_name = PYTHON_ZIP_NAME @@ -599,6 +601,15 @@ def main(): ns.source = ns.source or (Path(__file__).resolve().parent.parent.parent) ns.build = ns.build or Path(sys.executable).parent ns.doc_build = ns.doc_build or (ns.source / "Doc" / "build") + if 
ns.copy and not ns.copy.is_absolute(): + ns.copy = (Path.cwd() / ns.copy).resolve() + if not ns.temp: + # Put temp on a Dev Drive for speed if we're copying to one. + # If not, the regular temp dir will have to do. + if ns.copy and getattr(os.path, "isdevdrive", lambda d: False)(ns.copy): + ns.temp = ns.copy.with_name(ns.copy.name + "_temp") + else: + ns.temp = Path(tempfile.mkdtemp()) if not ns.source.is_absolute(): ns.source = (Path.cwd() / ns.source).resolve() if not ns.build.is_absolute(): @@ -617,21 +628,11 @@ def main(): else: ns.arch = "amd64" - if ns.copy and not ns.copy.is_absolute(): - ns.copy = (Path.cwd() / ns.copy).resolve() if ns.zip and not ns.zip.is_absolute(): ns.zip = (Path.cwd() / ns.zip).resolve() if ns.catalog and not ns.catalog.is_absolute(): ns.catalog = (Path.cwd() / ns.catalog).resolve() - if not ns.temp: - # Put temp on a Dev Drive for speed if we're copying to one. - # If not, the regular temp dir will have to do. - if ns.copy and getattr(os.path, "isdevdrive", lambda d: False)(ns.copy): - ns.temp = ns.copy.with_name(ns.copy.name + "_temp") - else: - ns.temp = Path(tempfile.mkdtemp()) - configure_logger(ns) log_info( diff --git a/PCbuild/get_externals.bat b/PCbuild/get_externals.bat index 761d3de93b777d..1927938ef0821c 100644 --- a/PCbuild/get_externals.bat +++ b/PCbuild/get_externals.bat @@ -56,8 +56,8 @@ if NOT "%IncludeLibffiSrc%"=="false" set libraries=%libraries% libffi-3.4.4 if NOT "%IncludeSSLSrc%"=="false" set libraries=%libraries% openssl-3.0.13 set libraries=%libraries% mpdecimal-4.0.0 set libraries=%libraries% sqlite-3.45.3.0 -if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tcl-core-8.6.13.1 -if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tk-8.6.13.1 +if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tcl-core-8.6.14.0 +if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tk-8.6.14.0 set libraries=%libraries% xz-5.2.5 set libraries=%libraries% zlib-1.3.1 @@ -78,7 +78,7 @@ echo.Fetching external binaries... 
set binaries= if NOT "%IncludeLibffi%"=="false" set binaries=%binaries% libffi-3.4.4 if NOT "%IncludeSSL%"=="false" set binaries=%binaries% openssl-bin-3.0.13 -if NOT "%IncludeTkinter%"=="false" set binaries=%binaries% tcltk-8.6.13.1 +if NOT "%IncludeTkinter%"=="false" set binaries=%binaries% tcltk-8.6.14.0 if NOT "%IncludeSSLSrc%"=="false" set binaries=%binaries% nasm-2.11.06 for %%b in (%binaries%) do ( diff --git a/PCbuild/tcltk.props b/PCbuild/tcltk.props index 8ddf01d5dd1dca..95b699b4cac0aa 100644 --- a/PCbuild/tcltk.props +++ b/PCbuild/tcltk.props @@ -2,7 +2,7 @@ - 8.6.13.1 + 8.6.14.0 $(TclVersion) $([System.Version]::Parse($(TclVersion)).Major) $([System.Version]::Parse($(TclVersion)).Minor) diff --git a/Parser/asdl_c.py b/Parser/asdl_c.py index 11d59faeb0d42c..9961d23629abc5 100755 --- a/Parser/asdl_c.py +++ b/Parser/asdl_c.py @@ -1022,6 +1022,13 @@ def visitModule(self, mod): goto set_remaining_cleanup; } } + else if (type == state->expr_context_type) { + // special case for expr_context: default to Load() + res = PyObject_SetAttr(self, name, state->Load_singleton); + if (res < 0) { + goto set_remaining_cleanup; + } + } else { // simple field (e.g., identifier) if (PyErr_WarnFormat( diff --git a/Parser/parser.c b/Parser/parser.c index 7bfc17a92e29de..0508aa0add5b85 100644 --- a/Parser/parser.c +++ b/Parser/parser.c @@ -21,28 +21,28 @@ static KeywordToken *reserved_keywords[] = { (KeywordToken[]) {{NULL, -1}}, (KeywordToken[]) {{NULL, -1}}, (KeywordToken[]) { - {"if", 662}, - {"as", 660}, - {"in", 673}, + {"if", 660}, + {"as", 658}, + {"in", 671}, {"or", 581}, {"is", 589}, {NULL, -1}, }, (KeywordToken[]) { {"del", 616}, - {"def", 677}, - {"for", 672}, - {"try", 644}, + {"def", 675}, + {"for", 670}, + {"try", 642}, {"and", 582}, - {"not", 681}, + {"not", 679}, {NULL, -1}, }, (KeywordToken[]) { {"from", 621}, {"pass", 504}, - {"with", 635}, - {"elif", 664}, - {"else", 665}, + {"with", 633}, + {"elif", 662}, + {"else", 663}, {"None", 614}, {"True", 613}, {NULL, -1}, @@ -51,9 +51,9 @@ static KeywordToken *reserved_keywords[] = { {"raise", 525}, {"yield", 580}, {"break", 508}, - {"async", 676}, - {"class", 679}, - {"while", 667}, + {"async", 674}, + {"class", 677}, + {"while", 665}, {"False", 615}, {"await", 590}, {NULL, -1}, @@ -63,12 +63,12 @@ static KeywordToken *reserved_keywords[] = { {"import", 622}, {"assert", 529}, {"global", 526}, - {"except", 657}, + {"except", 655}, {"lambda", 612}, {NULL, -1}, }, (KeywordToken[]) { - {"finally", 653}, + {"finally", 651}, {NULL, -1}, }, (KeywordToken[]) { @@ -306,315 +306,314 @@ static char *soft_keywords[] = { #define invalid_group_type 1219 #define invalid_import_type 1220 #define invalid_import_from_targets_type 1221 -#define invalid_compound_stmt_type 1222 -#define invalid_with_stmt_type 1223 -#define invalid_with_stmt_indent_type 1224 -#define invalid_try_stmt_type 1225 -#define invalid_except_stmt_type 1226 -#define invalid_finally_stmt_type 1227 -#define invalid_except_stmt_indent_type 1228 -#define invalid_except_star_stmt_indent_type 1229 -#define invalid_match_stmt_type 1230 -#define invalid_case_block_type 1231 -#define invalid_as_pattern_type 1232 -#define invalid_class_pattern_type 1233 -#define invalid_class_argument_pattern_type 1234 -#define invalid_if_stmt_type 1235 -#define invalid_elif_stmt_type 1236 -#define invalid_else_stmt_type 1237 -#define invalid_while_stmt_type 1238 -#define invalid_for_stmt_type 1239 -#define invalid_def_raw_type 1240 -#define invalid_class_def_raw_type 1241 -#define 
invalid_double_starred_kvpairs_type 1242 -#define invalid_kvpair_type 1243 -#define invalid_starred_expression_type 1244 -#define invalid_replacement_field_type 1245 -#define invalid_conversion_character_type 1246 -#define invalid_arithmetic_type 1247 -#define invalid_factor_type 1248 -#define invalid_type_params_type 1249 -#define _loop0_1_type 1250 -#define _loop0_2_type 1251 -#define _loop1_3_type 1252 -#define _loop0_5_type 1253 -#define _gather_4_type 1254 -#define _tmp_6_type 1255 -#define _tmp_7_type 1256 -#define _tmp_8_type 1257 -#define _tmp_9_type 1258 -#define _tmp_10_type 1259 -#define _tmp_11_type 1260 -#define _tmp_12_type 1261 -#define _tmp_13_type 1262 -#define _loop1_14_type 1263 -#define _tmp_15_type 1264 -#define _tmp_16_type 1265 -#define _tmp_17_type 1266 -#define _loop0_19_type 1267 -#define _gather_18_type 1268 -#define _loop0_21_type 1269 -#define _gather_20_type 1270 -#define _tmp_22_type 1271 -#define _tmp_23_type 1272 -#define _loop0_24_type 1273 -#define _loop1_25_type 1274 -#define _loop0_27_type 1275 -#define _gather_26_type 1276 -#define _tmp_28_type 1277 -#define _loop0_30_type 1278 -#define _gather_29_type 1279 -#define _tmp_31_type 1280 -#define _loop1_32_type 1281 -#define _tmp_33_type 1282 -#define _tmp_34_type 1283 -#define _tmp_35_type 1284 -#define _loop0_36_type 1285 -#define _loop0_37_type 1286 -#define _loop0_38_type 1287 -#define _loop1_39_type 1288 -#define _loop0_40_type 1289 -#define _loop1_41_type 1290 -#define _loop1_42_type 1291 -#define _loop1_43_type 1292 -#define _loop0_44_type 1293 -#define _loop1_45_type 1294 -#define _loop0_46_type 1295 -#define _loop1_47_type 1296 -#define _loop0_48_type 1297 -#define _loop0_49_type 1298 -#define _loop1_50_type 1299 -#define _loop0_52_type 1300 -#define _gather_51_type 1301 -#define _loop0_54_type 1302 -#define _gather_53_type 1303 -#define _loop0_56_type 1304 -#define _gather_55_type 1305 -#define _loop0_58_type 1306 -#define _gather_57_type 1307 -#define _tmp_59_type 1308 -#define _loop1_60_type 1309 -#define _loop1_61_type 1310 -#define _tmp_62_type 1311 -#define _tmp_63_type 1312 -#define _loop1_64_type 1313 -#define _loop0_66_type 1314 -#define _gather_65_type 1315 -#define _tmp_67_type 1316 -#define _tmp_68_type 1317 -#define _tmp_69_type 1318 -#define _tmp_70_type 1319 -#define _loop0_72_type 1320 -#define _gather_71_type 1321 -#define _loop0_74_type 1322 -#define _gather_73_type 1323 -#define _tmp_75_type 1324 -#define _loop0_77_type 1325 -#define _gather_76_type 1326 -#define _loop0_79_type 1327 -#define _gather_78_type 1328 -#define _loop0_81_type 1329 -#define _gather_80_type 1330 -#define _loop1_82_type 1331 -#define _loop1_83_type 1332 -#define _loop0_85_type 1333 -#define _gather_84_type 1334 -#define _loop1_86_type 1335 -#define _loop1_87_type 1336 -#define _loop1_88_type 1337 -#define _tmp_89_type 1338 -#define _loop0_91_type 1339 -#define _gather_90_type 1340 -#define _tmp_92_type 1341 -#define _tmp_93_type 1342 -#define _tmp_94_type 1343 -#define _tmp_95_type 1344 -#define _tmp_96_type 1345 -#define _tmp_97_type 1346 -#define _loop0_98_type 1347 -#define _loop0_99_type 1348 -#define _loop0_100_type 1349 -#define _loop1_101_type 1350 -#define _loop0_102_type 1351 -#define _loop1_103_type 1352 -#define _loop1_104_type 1353 -#define _loop1_105_type 1354 -#define _loop0_106_type 1355 -#define _loop1_107_type 1356 -#define _loop0_108_type 1357 -#define _loop1_109_type 1358 -#define _loop0_110_type 1359 -#define _loop1_111_type 1360 -#define _loop0_112_type 1361 -#define _loop0_113_type 
1362 -#define _loop1_114_type 1363 -#define _tmp_115_type 1364 -#define _loop0_117_type 1365 -#define _gather_116_type 1366 -#define _loop1_118_type 1367 -#define _loop0_119_type 1368 -#define _loop0_120_type 1369 -#define _tmp_121_type 1370 -#define _tmp_122_type 1371 -#define _loop0_124_type 1372 -#define _gather_123_type 1373 -#define _tmp_125_type 1374 -#define _loop0_127_type 1375 -#define _gather_126_type 1376 -#define _loop0_129_type 1377 -#define _gather_128_type 1378 -#define _loop0_131_type 1379 -#define _gather_130_type 1380 -#define _loop0_133_type 1381 -#define _gather_132_type 1382 -#define _loop0_134_type 1383 -#define _loop0_136_type 1384 -#define _gather_135_type 1385 -#define _loop1_137_type 1386 -#define _tmp_138_type 1387 -#define _loop0_140_type 1388 -#define _gather_139_type 1389 -#define _loop0_142_type 1390 -#define _gather_141_type 1391 -#define _loop0_144_type 1392 -#define _gather_143_type 1393 -#define _loop0_146_type 1394 -#define _gather_145_type 1395 -#define _loop0_148_type 1396 -#define _gather_147_type 1397 -#define _tmp_149_type 1398 -#define _tmp_150_type 1399 -#define _loop0_152_type 1400 -#define _gather_151_type 1401 -#define _tmp_153_type 1402 -#define _tmp_154_type 1403 -#define _tmp_155_type 1404 -#define _tmp_156_type 1405 -#define _tmp_157_type 1406 -#define _tmp_158_type 1407 -#define _tmp_159_type 1408 -#define _tmp_160_type 1409 -#define _tmp_161_type 1410 -#define _tmp_162_type 1411 -#define _loop0_163_type 1412 -#define _loop0_164_type 1413 -#define _loop0_165_type 1414 -#define _tmp_166_type 1415 -#define _tmp_167_type 1416 -#define _tmp_168_type 1417 -#define _tmp_169_type 1418 -#define _loop0_170_type 1419 -#define _loop0_171_type 1420 -#define _loop0_172_type 1421 -#define _loop1_173_type 1422 -#define _tmp_174_type 1423 -#define _loop0_175_type 1424 -#define _tmp_176_type 1425 -#define _loop0_177_type 1426 -#define _loop1_178_type 1427 -#define _tmp_179_type 1428 -#define _tmp_180_type 1429 -#define _tmp_181_type 1430 -#define _loop0_182_type 1431 -#define _tmp_183_type 1432 -#define _tmp_184_type 1433 -#define _loop1_185_type 1434 -#define _tmp_186_type 1435 -#define _loop0_187_type 1436 -#define _loop0_188_type 1437 -#define _loop0_189_type 1438 -#define _loop0_191_type 1439 -#define _gather_190_type 1440 -#define _tmp_192_type 1441 -#define _loop0_193_type 1442 -#define _tmp_194_type 1443 -#define _loop0_195_type 1444 -#define _loop1_196_type 1445 -#define _loop1_197_type 1446 -#define _tmp_198_type 1447 -#define _tmp_199_type 1448 -#define _loop0_200_type 1449 -#define _tmp_201_type 1450 -#define _tmp_202_type 1451 -#define _tmp_203_type 1452 -#define _loop0_205_type 1453 -#define _gather_204_type 1454 -#define _loop0_207_type 1455 -#define _gather_206_type 1456 -#define _loop0_209_type 1457 -#define _gather_208_type 1458 -#define _loop0_211_type 1459 -#define _gather_210_type 1460 -#define _loop0_213_type 1461 -#define _gather_212_type 1462 -#define _tmp_214_type 1463 -#define _loop0_215_type 1464 -#define _loop1_216_type 1465 -#define _tmp_217_type 1466 -#define _loop0_218_type 1467 -#define _loop1_219_type 1468 -#define _tmp_220_type 1469 -#define _tmp_221_type 1470 -#define _tmp_222_type 1471 -#define _tmp_223_type 1472 -#define _tmp_224_type 1473 -#define _tmp_225_type 1474 -#define _tmp_226_type 1475 -#define _tmp_227_type 1476 -#define _tmp_228_type 1477 -#define _tmp_229_type 1478 -#define _tmp_230_type 1479 -#define _loop0_232_type 1480 -#define _gather_231_type 1481 -#define _tmp_233_type 1482 -#define _tmp_234_type 1483 
-#define _tmp_235_type 1484 -#define _tmp_236_type 1485 -#define _tmp_237_type 1486 -#define _tmp_238_type 1487 -#define _tmp_239_type 1488 -#define _loop0_240_type 1489 -#define _tmp_241_type 1490 -#define _tmp_242_type 1491 -#define _tmp_243_type 1492 -#define _tmp_244_type 1493 -#define _tmp_245_type 1494 -#define _tmp_246_type 1495 -#define _tmp_247_type 1496 -#define _tmp_248_type 1497 -#define _tmp_249_type 1498 -#define _tmp_250_type 1499 -#define _tmp_251_type 1500 -#define _tmp_252_type 1501 -#define _tmp_253_type 1502 -#define _tmp_254_type 1503 -#define _tmp_255_type 1504 -#define _tmp_256_type 1505 -#define _loop0_257_type 1506 -#define _tmp_258_type 1507 -#define _tmp_259_type 1508 -#define _tmp_260_type 1509 -#define _tmp_261_type 1510 -#define _tmp_262_type 1511 -#define _tmp_263_type 1512 -#define _tmp_264_type 1513 -#define _tmp_265_type 1514 -#define _tmp_266_type 1515 -#define _tmp_267_type 1516 -#define _tmp_268_type 1517 -#define _tmp_269_type 1518 -#define _tmp_270_type 1519 -#define _tmp_271_type 1520 -#define _tmp_272_type 1521 -#define _tmp_273_type 1522 -#define _loop0_275_type 1523 -#define _gather_274_type 1524 -#define _tmp_276_type 1525 -#define _tmp_277_type 1526 -#define _tmp_278_type 1527 -#define _tmp_279_type 1528 -#define _tmp_280_type 1529 -#define _tmp_281_type 1530 +#define invalid_with_stmt_type 1222 +#define invalid_with_stmt_indent_type 1223 +#define invalid_try_stmt_type 1224 +#define invalid_except_stmt_type 1225 +#define invalid_finally_stmt_type 1226 +#define invalid_except_stmt_indent_type 1227 +#define invalid_except_star_stmt_indent_type 1228 +#define invalid_match_stmt_type 1229 +#define invalid_case_block_type 1230 +#define invalid_as_pattern_type 1231 +#define invalid_class_pattern_type 1232 +#define invalid_class_argument_pattern_type 1233 +#define invalid_if_stmt_type 1234 +#define invalid_elif_stmt_type 1235 +#define invalid_else_stmt_type 1236 +#define invalid_while_stmt_type 1237 +#define invalid_for_stmt_type 1238 +#define invalid_def_raw_type 1239 +#define invalid_class_def_raw_type 1240 +#define invalid_double_starred_kvpairs_type 1241 +#define invalid_kvpair_type 1242 +#define invalid_starred_expression_type 1243 +#define invalid_replacement_field_type 1244 +#define invalid_conversion_character_type 1245 +#define invalid_arithmetic_type 1246 +#define invalid_factor_type 1247 +#define invalid_type_params_type 1248 +#define _loop0_1_type 1249 +#define _loop0_2_type 1250 +#define _loop1_3_type 1251 +#define _loop0_5_type 1252 +#define _gather_4_type 1253 +#define _tmp_6_type 1254 +#define _tmp_7_type 1255 +#define _tmp_8_type 1256 +#define _tmp_9_type 1257 +#define _tmp_10_type 1258 +#define _tmp_11_type 1259 +#define _tmp_12_type 1260 +#define _tmp_13_type 1261 +#define _loop1_14_type 1262 +#define _tmp_15_type 1263 +#define _tmp_16_type 1264 +#define _tmp_17_type 1265 +#define _loop0_19_type 1266 +#define _gather_18_type 1267 +#define _loop0_21_type 1268 +#define _gather_20_type 1269 +#define _tmp_22_type 1270 +#define _tmp_23_type 1271 +#define _loop0_24_type 1272 +#define _loop1_25_type 1273 +#define _loop0_27_type 1274 +#define _gather_26_type 1275 +#define _tmp_28_type 1276 +#define _loop0_30_type 1277 +#define _gather_29_type 1278 +#define _tmp_31_type 1279 +#define _loop1_32_type 1280 +#define _tmp_33_type 1281 +#define _tmp_34_type 1282 +#define _tmp_35_type 1283 +#define _loop0_36_type 1284 +#define _loop0_37_type 1285 +#define _loop0_38_type 1286 +#define _loop1_39_type 1287 +#define _loop0_40_type 1288 +#define 
_loop1_41_type 1289 +#define _loop1_42_type 1290 +#define _loop1_43_type 1291 +#define _loop0_44_type 1292 +#define _loop1_45_type 1293 +#define _loop0_46_type 1294 +#define _loop1_47_type 1295 +#define _loop0_48_type 1296 +#define _loop0_49_type 1297 +#define _loop1_50_type 1298 +#define _loop0_52_type 1299 +#define _gather_51_type 1300 +#define _loop0_54_type 1301 +#define _gather_53_type 1302 +#define _loop0_56_type 1303 +#define _gather_55_type 1304 +#define _loop0_58_type 1305 +#define _gather_57_type 1306 +#define _tmp_59_type 1307 +#define _loop1_60_type 1308 +#define _loop1_61_type 1309 +#define _tmp_62_type 1310 +#define _tmp_63_type 1311 +#define _loop1_64_type 1312 +#define _loop0_66_type 1313 +#define _gather_65_type 1314 +#define _tmp_67_type 1315 +#define _tmp_68_type 1316 +#define _tmp_69_type 1317 +#define _tmp_70_type 1318 +#define _loop0_72_type 1319 +#define _gather_71_type 1320 +#define _loop0_74_type 1321 +#define _gather_73_type 1322 +#define _tmp_75_type 1323 +#define _loop0_77_type 1324 +#define _gather_76_type 1325 +#define _loop0_79_type 1326 +#define _gather_78_type 1327 +#define _loop0_81_type 1328 +#define _gather_80_type 1329 +#define _loop1_82_type 1330 +#define _loop1_83_type 1331 +#define _loop0_85_type 1332 +#define _gather_84_type 1333 +#define _loop1_86_type 1334 +#define _loop1_87_type 1335 +#define _loop1_88_type 1336 +#define _tmp_89_type 1337 +#define _loop0_91_type 1338 +#define _gather_90_type 1339 +#define _tmp_92_type 1340 +#define _tmp_93_type 1341 +#define _tmp_94_type 1342 +#define _tmp_95_type 1343 +#define _tmp_96_type 1344 +#define _tmp_97_type 1345 +#define _loop0_98_type 1346 +#define _loop0_99_type 1347 +#define _loop0_100_type 1348 +#define _loop1_101_type 1349 +#define _loop0_102_type 1350 +#define _loop1_103_type 1351 +#define _loop1_104_type 1352 +#define _loop1_105_type 1353 +#define _loop0_106_type 1354 +#define _loop1_107_type 1355 +#define _loop0_108_type 1356 +#define _loop1_109_type 1357 +#define _loop0_110_type 1358 +#define _loop1_111_type 1359 +#define _loop0_112_type 1360 +#define _loop0_113_type 1361 +#define _loop1_114_type 1362 +#define _tmp_115_type 1363 +#define _loop0_117_type 1364 +#define _gather_116_type 1365 +#define _loop1_118_type 1366 +#define _loop0_119_type 1367 +#define _loop0_120_type 1368 +#define _tmp_121_type 1369 +#define _tmp_122_type 1370 +#define _loop0_124_type 1371 +#define _gather_123_type 1372 +#define _tmp_125_type 1373 +#define _loop0_127_type 1374 +#define _gather_126_type 1375 +#define _loop0_129_type 1376 +#define _gather_128_type 1377 +#define _loop0_131_type 1378 +#define _gather_130_type 1379 +#define _loop0_133_type 1380 +#define _gather_132_type 1381 +#define _loop0_134_type 1382 +#define _loop0_136_type 1383 +#define _gather_135_type 1384 +#define _loop1_137_type 1385 +#define _tmp_138_type 1386 +#define _loop0_140_type 1387 +#define _gather_139_type 1388 +#define _loop0_142_type 1389 +#define _gather_141_type 1390 +#define _loop0_144_type 1391 +#define _gather_143_type 1392 +#define _loop0_146_type 1393 +#define _gather_145_type 1394 +#define _loop0_148_type 1395 +#define _gather_147_type 1396 +#define _tmp_149_type 1397 +#define _tmp_150_type 1398 +#define _loop0_152_type 1399 +#define _gather_151_type 1400 +#define _tmp_153_type 1401 +#define _tmp_154_type 1402 +#define _tmp_155_type 1403 +#define _tmp_156_type 1404 +#define _tmp_157_type 1405 +#define _tmp_158_type 1406 +#define _tmp_159_type 1407 +#define _tmp_160_type 1408 +#define _tmp_161_type 1409 +#define _tmp_162_type 1410 
+#define _loop0_163_type 1411 +#define _loop0_164_type 1412 +#define _loop0_165_type 1413 +#define _tmp_166_type 1414 +#define _tmp_167_type 1415 +#define _tmp_168_type 1416 +#define _tmp_169_type 1417 +#define _loop0_170_type 1418 +#define _loop0_171_type 1419 +#define _loop0_172_type 1420 +#define _loop1_173_type 1421 +#define _tmp_174_type 1422 +#define _loop0_175_type 1423 +#define _tmp_176_type 1424 +#define _loop0_177_type 1425 +#define _loop1_178_type 1426 +#define _tmp_179_type 1427 +#define _tmp_180_type 1428 +#define _tmp_181_type 1429 +#define _loop0_182_type 1430 +#define _tmp_183_type 1431 +#define _tmp_184_type 1432 +#define _loop1_185_type 1433 +#define _tmp_186_type 1434 +#define _loop0_187_type 1435 +#define _loop0_188_type 1436 +#define _loop0_189_type 1437 +#define _loop0_191_type 1438 +#define _gather_190_type 1439 +#define _tmp_192_type 1440 +#define _loop0_193_type 1441 +#define _tmp_194_type 1442 +#define _loop0_195_type 1443 +#define _loop1_196_type 1444 +#define _loop1_197_type 1445 +#define _tmp_198_type 1446 +#define _tmp_199_type 1447 +#define _loop0_200_type 1448 +#define _tmp_201_type 1449 +#define _tmp_202_type 1450 +#define _tmp_203_type 1451 +#define _loop0_205_type 1452 +#define _gather_204_type 1453 +#define _loop0_207_type 1454 +#define _gather_206_type 1455 +#define _loop0_209_type 1456 +#define _gather_208_type 1457 +#define _loop0_211_type 1458 +#define _gather_210_type 1459 +#define _loop0_213_type 1460 +#define _gather_212_type 1461 +#define _tmp_214_type 1462 +#define _loop0_215_type 1463 +#define _loop1_216_type 1464 +#define _tmp_217_type 1465 +#define _loop0_218_type 1466 +#define _loop1_219_type 1467 +#define _tmp_220_type 1468 +#define _tmp_221_type 1469 +#define _tmp_222_type 1470 +#define _tmp_223_type 1471 +#define _tmp_224_type 1472 +#define _tmp_225_type 1473 +#define _tmp_226_type 1474 +#define _tmp_227_type 1475 +#define _tmp_228_type 1476 +#define _tmp_229_type 1477 +#define _tmp_230_type 1478 +#define _loop0_232_type 1479 +#define _gather_231_type 1480 +#define _tmp_233_type 1481 +#define _tmp_234_type 1482 +#define _tmp_235_type 1483 +#define _tmp_236_type 1484 +#define _tmp_237_type 1485 +#define _tmp_238_type 1486 +#define _tmp_239_type 1487 +#define _loop0_240_type 1488 +#define _tmp_241_type 1489 +#define _tmp_242_type 1490 +#define _tmp_243_type 1491 +#define _tmp_244_type 1492 +#define _tmp_245_type 1493 +#define _tmp_246_type 1494 +#define _tmp_247_type 1495 +#define _tmp_248_type 1496 +#define _tmp_249_type 1497 +#define _tmp_250_type 1498 +#define _tmp_251_type 1499 +#define _tmp_252_type 1500 +#define _tmp_253_type 1501 +#define _tmp_254_type 1502 +#define _tmp_255_type 1503 +#define _tmp_256_type 1504 +#define _loop0_257_type 1505 +#define _tmp_258_type 1506 +#define _tmp_259_type 1507 +#define _tmp_260_type 1508 +#define _tmp_261_type 1509 +#define _tmp_262_type 1510 +#define _tmp_263_type 1511 +#define _tmp_264_type 1512 +#define _tmp_265_type 1513 +#define _tmp_266_type 1514 +#define _tmp_267_type 1515 +#define _tmp_268_type 1516 +#define _tmp_269_type 1517 +#define _tmp_270_type 1518 +#define _tmp_271_type 1519 +#define _tmp_272_type 1520 +#define _tmp_273_type 1521 +#define _loop0_275_type 1522 +#define _gather_274_type 1523 +#define _tmp_276_type 1524 +#define _tmp_277_type 1525 +#define _tmp_278_type 1526 +#define _tmp_279_type 1527 +#define _tmp_280_type 1528 +#define _tmp_281_type 1529 static mod_ty file_rule(Parser *p); static mod_ty interactive_rule(Parser *p); @@ -838,7 +837,6 @@ static void 
*invalid_for_target_rule(Parser *p); static void *invalid_group_rule(Parser *p); static void *invalid_import_rule(Parser *p); static void *invalid_import_from_targets_rule(Parser *p); -static void *invalid_compound_stmt_rule(Parser *p); static void *invalid_with_stmt_rule(Parser *p); static void *invalid_with_stmt_indent_rule(Parser *p); static void *invalid_try_stmt_rule(Parser *p); @@ -2056,7 +2054,6 @@ simple_stmt_rule(Parser *p) } // compound_stmt: -// | invalid_compound_stmt // | &('def' | '@' | 'async') function_def // | &'if' if_stmt // | &('class' | '@') class_def @@ -2077,25 +2074,6 @@ compound_stmt_rule(Parser *p) } stmt_ty _res = NULL; int _mark = p->mark; - if (p->call_invalid_rules) { // invalid_compound_stmt - if (p->error_indicator) { - p->level--; - return NULL; - } - D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_compound_stmt")); - void *invalid_compound_stmt_var; - if ( - (invalid_compound_stmt_var = invalid_compound_stmt_rule(p)) // invalid_compound_stmt - ) - { - D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_compound_stmt")); - _res = invalid_compound_stmt_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_compound_stmt")); - } { // &('def' | '@' | 'async') function_def if (p->error_indicator) { p->level--; @@ -2125,7 +2103,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'if' if_stmt")); stmt_ty if_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 662) // token='if' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 660) // token='if' && (if_stmt_var = if_stmt_rule(p)) // if_stmt ) @@ -2209,7 +2187,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'try' try_stmt")); stmt_ty try_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 644) // token='try' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 642) // token='try' && (try_stmt_var = try_stmt_rule(p)) // try_stmt ) @@ -2230,7 +2208,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'while' while_stmt")); stmt_ty while_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 667) // token='while' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 665) // token='while' && (while_stmt_var = while_stmt_rule(p)) // while_stmt ) @@ -4370,7 +4348,7 @@ class_def_raw_rule(Parser *p) asdl_stmt_seq* c; void *t; if ( - (_keyword = _PyPegen_expect_token(p, 679)) // token='class' + (_keyword = _PyPegen_expect_token(p, 677)) // token='class' && (a = _PyPegen_name_token(p)) // NAME && @@ -4537,7 +4515,7 @@ function_def_raw_rule(Parser *p) void *t; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 677)) // token='def' + (_keyword = _PyPegen_expect_token(p, 675)) // token='def' && (n = _PyPegen_name_token(p)) // NAME && @@ -4598,9 +4576,9 @@ function_def_raw_rule(Parser *p) void *t; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 676)) // token='async' + (_keyword = _PyPegen_expect_token(p, 674)) // token='async' && - (_keyword_1 = _PyPegen_expect_token(p, 677)) // token='def' + (_keyword_1 = _PyPegen_expect_token(p, 675)) // token='def' && (n = _PyPegen_name_token(p)) // NAME && @@ -5938,7 
+5916,7 @@ if_stmt_rule(Parser *p) asdl_stmt_seq* b; stmt_ty c; if ( - (_keyword = _PyPegen_expect_token(p, 662)) // token='if' + (_keyword = _PyPegen_expect_token(p, 660)) // token='if' && (a = named_expression_rule(p)) // named_expression && @@ -5983,7 +5961,7 @@ if_stmt_rule(Parser *p) asdl_stmt_seq* b; void *c; if ( - (_keyword = _PyPegen_expect_token(p, 662)) // token='if' + (_keyword = _PyPegen_expect_token(p, 660)) // token='if' && (a = named_expression_rule(p)) // named_expression && @@ -6078,7 +6056,7 @@ elif_stmt_rule(Parser *p) asdl_stmt_seq* b; stmt_ty c; if ( - (_keyword = _PyPegen_expect_token(p, 664)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 662)) // token='elif' && (a = named_expression_rule(p)) // named_expression && @@ -6123,7 +6101,7 @@ elif_stmt_rule(Parser *p) asdl_stmt_seq* b; void *c; if ( - (_keyword = _PyPegen_expect_token(p, 664)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 662)) // token='elif' && (a = named_expression_rule(p)) // named_expression && @@ -6204,7 +6182,7 @@ else_block_rule(Parser *p) Token * _literal; asdl_stmt_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 665)) // token='else' + (_keyword = _PyPegen_expect_token(p, 663)) // token='else' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -6283,7 +6261,7 @@ while_stmt_rule(Parser *p) asdl_stmt_seq* b; void *c; if ( - (_keyword = _PyPegen_expect_token(p, 667)) // token='while' + (_keyword = _PyPegen_expect_token(p, 665)) // token='while' && (a = named_expression_rule(p)) // named_expression && @@ -6383,11 +6361,11 @@ for_stmt_rule(Parser *p) expr_ty t; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 672)) // token='for' + (_keyword = _PyPegen_expect_token(p, 670)) // token='for' && (t = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 673)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 671)) // token='in' && (_cut_var = 1) && @@ -6445,13 +6423,13 @@ for_stmt_rule(Parser *p) expr_ty t; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 676)) // token='async' + (_keyword = _PyPegen_expect_token(p, 674)) // token='async' && - (_keyword_1 = _PyPegen_expect_token(p, 672)) // token='for' + (_keyword_1 = _PyPegen_expect_token(p, 670)) // token='for' && (t = star_targets_rule(p)) // star_targets && - (_keyword_2 = _PyPegen_expect_token(p, 673)) // token='in' + (_keyword_2 = _PyPegen_expect_token(p, 671)) // token='in' && (_cut_var = 1) && @@ -6580,7 +6558,7 @@ with_stmt_rule(Parser *p) asdl_stmt_seq* b; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 635)) // token='with' + (_keyword = _PyPegen_expect_token(p, 633)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && @@ -6631,7 +6609,7 @@ with_stmt_rule(Parser *p) asdl_stmt_seq* b; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 635)) // token='with' + (_keyword = _PyPegen_expect_token(p, 633)) // token='with' && (a = (asdl_withitem_seq*)_gather_53_rule(p)) // ','.with_item+ && @@ -6680,9 +6658,9 @@ with_stmt_rule(Parser *p) asdl_withitem_seq* a; asdl_stmt_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 676)) // token='async' + (_keyword = _PyPegen_expect_token(p, 674)) // token='async' && - (_keyword_1 = _PyPegen_expect_token(p, 635)) // token='with' + (_keyword_1 = _PyPegen_expect_token(p, 633)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && @@ -6732,9 +6710,9 @@ with_stmt_rule(Parser *p) asdl_stmt_seq* b; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 676)) // 
token='async' + (_keyword = _PyPegen_expect_token(p, 674)) // token='async' && - (_keyword_1 = _PyPegen_expect_token(p, 635)) // token='with' + (_keyword_1 = _PyPegen_expect_token(p, 633)) // token='with' && (a = (asdl_withitem_seq*)_gather_57_rule(p)) // ','.with_item+ && @@ -6820,7 +6798,7 @@ with_item_rule(Parser *p) if ( (e = expression_rule(p)) // expression && - (_keyword = _PyPegen_expect_token(p, 660)) // token='as' + (_keyword = _PyPegen_expect_token(p, 658)) // token='as' && (t = star_target_rule(p)) // star_target && @@ -6945,7 +6923,7 @@ try_stmt_rule(Parser *p) asdl_stmt_seq* b; asdl_stmt_seq* f; if ( - (_keyword = _PyPegen_expect_token(p, 644)) // token='try' + (_keyword = _PyPegen_expect_token(p, 642)) // token='try' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -6989,7 +6967,7 @@ try_stmt_rule(Parser *p) asdl_excepthandler_seq* ex; void *f; if ( - (_keyword = _PyPegen_expect_token(p, 644)) // token='try' + (_keyword = _PyPegen_expect_token(p, 642)) // token='try' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -7037,7 +7015,7 @@ try_stmt_rule(Parser *p) asdl_excepthandler_seq* ex; void *f; if ( - (_keyword = _PyPegen_expect_token(p, 644)) // token='try' + (_keyword = _PyPegen_expect_token(p, 642)) // token='try' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -7135,7 +7113,7 @@ except_block_rule(Parser *p) expr_ty e; void *t; if ( - (_keyword = _PyPegen_expect_token(p, 657)) // token='except' + (_keyword = _PyPegen_expect_token(p, 655)) // token='except' && (e = expression_rule(p)) // expression && @@ -7178,7 +7156,7 @@ except_block_rule(Parser *p) Token * _literal; asdl_stmt_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 657)) // token='except' + (_keyword = _PyPegen_expect_token(p, 655)) // token='except' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -7289,7 +7267,7 @@ except_star_block_rule(Parser *p) expr_ty e; void *t; if ( - (_keyword = _PyPegen_expect_token(p, 657)) // token='except' + (_keyword = _PyPegen_expect_token(p, 655)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && @@ -7391,7 +7369,7 @@ finally_block_rule(Parser *p) Token * _literal; asdl_stmt_seq* a; if ( - (_keyword = _PyPegen_expect_token(p, 653)) // token='finally' + (_keyword = _PyPegen_expect_token(p, 651)) // token='finally' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -7699,7 +7677,7 @@ guard_rule(Parser *p) Token * _keyword; expr_ty guard; if ( - (_keyword = _PyPegen_expect_token(p, 662)) // token='if' + (_keyword = _PyPegen_expect_token(p, 660)) // token='if' && (guard = named_expression_rule(p)) // named_expression ) @@ -7894,7 +7872,7 @@ as_pattern_rule(Parser *p) if ( (pattern = or_pattern_rule(p)) // or_pattern && - (_keyword = _PyPegen_expect_token(p, 660)) // token='as' + (_keyword = _PyPegen_expect_token(p, 658)) // token='as' && (target = pattern_capture_target_rule(p)) // pattern_capture_target ) @@ -11237,11 +11215,11 @@ expression_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 662)) // token='if' + (_keyword = _PyPegen_expect_token(p, 660)) // token='if' && (b = disjunction_rule(p)) // disjunction && - (_keyword_1 = _PyPegen_expect_token(p, 665)) // token='else' + (_keyword_1 = _PyPegen_expect_token(p, 663)) // token='else' && (c = expression_rule(p)) // expression ) @@ -12123,7 +12101,7 @@ inversion_rule(Parser *p) Token * _keyword; 
expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 681)) // token='not' + (_keyword = _PyPegen_expect_token(p, 679)) // token='not' && (a = inversion_rule(p)) // inversion ) @@ -12777,9 +12755,9 @@ notin_bitwise_or_rule(Parser *p) Token * _keyword_1; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 681)) // token='not' + (_keyword = _PyPegen_expect_token(p, 679)) // token='not' && - (_keyword_1 = _PyPegen_expect_token(p, 673)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 671)) // token='in' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -12825,7 +12803,7 @@ in_bitwise_or_rule(Parser *p) Token * _keyword; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 673)) // token='in' + (_keyword = _PyPegen_expect_token(p, 671)) // token='in' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -12874,7 +12852,7 @@ isnot_bitwise_or_rule(Parser *p) if ( (_keyword = _PyPegen_expect_token(p, 589)) // token='is' && - (_keyword_1 = _PyPegen_expect_token(p, 681)) // token='not' + (_keyword_1 = _PyPegen_expect_token(p, 679)) // token='not' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -17045,13 +17023,13 @@ for_if_clause_rule(Parser *p) expr_ty b; asdl_expr_seq* c; if ( - (_keyword = _PyPegen_expect_token(p, 676)) // token='async' + (_keyword = _PyPegen_expect_token(p, 674)) // token='async' && - (_keyword_1 = _PyPegen_expect_token(p, 672)) // token='for' + (_keyword_1 = _PyPegen_expect_token(p, 670)) // token='for' && (a = star_targets_rule(p)) // star_targets && - (_keyword_2 = _PyPegen_expect_token(p, 673)) // token='in' + (_keyword_2 = _PyPegen_expect_token(p, 671)) // token='in' && (_cut_var = 1) && @@ -17090,11 +17068,11 @@ for_if_clause_rule(Parser *p) expr_ty b; asdl_expr_seq* c; if ( - (_keyword = _PyPegen_expect_token(p, 672)) // token='for' + (_keyword = _PyPegen_expect_token(p, 670)) // token='for' && (a = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 673)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 671)) // token='in' && (_cut_var = 1) && @@ -17131,13 +17109,13 @@ for_if_clause_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings void *_tmp_121_var; if ( - (_opt_var = _PyPegen_expect_token(p, 676), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 674), !p->error_indicator) // 'async'? && - (_keyword = _PyPegen_expect_token(p, 672)) // token='for' + (_keyword = _PyPegen_expect_token(p, 670)) // token='for' && (_tmp_121_var = _tmp_121_rule(p)) // bitwise_or ((',' bitwise_or))* ','? && - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 673) // token='in' + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 671) // token='in' ) { D(fprintf(stderr, "%*c+ for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'? 'for' (bitwise_or ((',' bitwise_or))* ','?) 
!'in'")); @@ -20411,11 +20389,11 @@ expression_without_invalid_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 662)) // token='if' + (_keyword = _PyPegen_expect_token(p, 660)) // token='if' && (b = disjunction_rule(p)) // disjunction && - (_keyword_1 = _PyPegen_expect_token(p, 665)) // token='else' + (_keyword_1 = _PyPegen_expect_token(p, 663)) // token='else' && (c = expression_rule(p)) // expression ) @@ -20596,7 +20574,7 @@ invalid_expression_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 662)) // token='if' + (_keyword = _PyPegen_expect_token(p, 660)) // token='if' && (b = disjunction_rule(p)) // disjunction && @@ -22534,7 +22512,7 @@ invalid_with_item_rule(Parser *p) if ( (expression_var = expression_rule(p)) // expression && - (_keyword = _PyPegen_expect_token(p, 660)) // token='as' + (_keyword = _PyPegen_expect_token(p, 658)) // token='as' && (a = expression_rule(p)) // expression && @@ -22584,9 +22562,9 @@ invalid_for_target_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings expr_ty a; if ( - (_opt_var = _PyPegen_expect_token(p, 676), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 674), !p->error_indicator) // 'async'? && - (_keyword = _PyPegen_expect_token(p, 672)) // token='for' + (_keyword = _PyPegen_expect_token(p, 670)) // token='for' && (a = star_expressions_rule(p)) // star_expressions ) @@ -22844,82 +22822,6 @@ invalid_import_from_targets_rule(Parser *p) return _res; } -// invalid_compound_stmt: 'elif' named_expression ':' | 'else' ':' -static void * -invalid_compound_stmt_rule(Parser *p) -{ - if (p->level++ == MAXSTACK) { - _Pypegen_stack_overflow(p); - } - if (p->error_indicator) { - p->level--; - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // 'elif' named_expression ':' - if (p->error_indicator) { - p->level--; - return NULL; - } - D(fprintf(stderr, "%*c> invalid_compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'elif' named_expression ':'")); - Token * _literal; - Token * a; - expr_ty named_expression_var; - if ( - (a = _PyPegen_expect_token(p, 664)) // token='elif' - && - (named_expression_var = named_expression_rule(p)) // named_expression - && - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - ) - { - D(fprintf(stderr, "%*c+ invalid_compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'elif' named_expression ':'")); - _res = RAISE_SYNTAX_ERROR_STARTING_FROM ( a , "'elif' must match an if-statement here" ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - p->level--; - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s invalid_compound_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'elif' named_expression ':'")); - } - { // 'else' ':' - if (p->error_indicator) { - p->level--; - return NULL; - } - D(fprintf(stderr, "%*c> invalid_compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'else' ':'")); - Token * _literal; - Token * a; - if ( - (a = _PyPegen_expect_token(p, 665)) // token='else' - && - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - ) - { - D(fprintf(stderr, "%*c+ invalid_compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else' ':'")); - _res = RAISE_SYNTAX_ERROR_STARTING_FROM ( a , "'else' must match a valid statement here" ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - p->level--; - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s invalid_compound_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'else' ':'")); - } - _res = NULL; - done: - p->level--; - return _res; -} - // invalid_with_stmt: // | 'async'? 'with' ','.(expression ['as' star_target])+ NEWLINE // | 'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE @@ -22947,9 +22849,9 @@ invalid_with_stmt_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, 676), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 674), !p->error_indicator) // 'async'? && - (_keyword = _PyPegen_expect_token(p, 635)) // token='with' + (_keyword = _PyPegen_expect_token(p, 633)) // token='with' && (_gather_206_var = _gather_206_rule(p)) // ','.(expression ['as' star_target])+ && @@ -22985,9 +22887,9 @@ invalid_with_stmt_rule(Parser *p) UNUSED(_opt_var_1); // Silence compiler warnings Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, 676), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 674), !p->error_indicator) // 'async'? && - (_keyword = _PyPegen_expect_token(p, 635)) // token='with' + (_keyword = _PyPegen_expect_token(p, 633)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && @@ -23047,9 +22949,9 @@ invalid_with_stmt_indent_rule(Parser *p) Token * a; Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, 676), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 674), !p->error_indicator) // 'async'? && - (a = _PyPegen_expect_token(p, 635)) // token='with' + (a = _PyPegen_expect_token(p, 633)) // token='with' && (_gather_210_var = _gather_210_rule(p)) // ','.(expression ['as' star_target])+ && @@ -23090,9 +22992,9 @@ invalid_with_stmt_indent_rule(Parser *p) Token * a; Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, 676), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 674), !p->error_indicator) // 'async'? 
&& - (a = _PyPegen_expect_token(p, 635)) // token='with' + (a = _PyPegen_expect_token(p, 633)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && @@ -23155,7 +23057,7 @@ invalid_try_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 644)) // token='try' + (a = _PyPegen_expect_token(p, 642)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -23187,7 +23089,7 @@ invalid_try_stmt_rule(Parser *p) Token * _literal; asdl_stmt_seq* block_var; if ( - (_keyword = _PyPegen_expect_token(p, 644)) // token='try' + (_keyword = _PyPegen_expect_token(p, 642)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -23226,7 +23128,7 @@ invalid_try_stmt_rule(Parser *p) Token * b; expr_ty expression_var; if ( - (_keyword = _PyPegen_expect_token(p, 644)) // token='try' + (_keyword = _PyPegen_expect_token(p, 642)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -23234,7 +23136,7 @@ invalid_try_stmt_rule(Parser *p) && (_loop1_216_var = _loop1_216_rule(p)) // except_block+ && - (a = _PyPegen_expect_token(p, 657)) // token='except' + (a = _PyPegen_expect_token(p, 655)) // token='except' && (b = _PyPegen_expect_token(p, 16)) // token='*' && @@ -23273,7 +23175,7 @@ invalid_try_stmt_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings Token * a; if ( - (_keyword = _PyPegen_expect_token(p, 644)) // token='try' + (_keyword = _PyPegen_expect_token(p, 642)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -23281,7 +23183,7 @@ invalid_try_stmt_rule(Parser *p) && (_loop1_219_var = _loop1_219_rule(p)) // except_star_block+ && - (a = _PyPegen_expect_token(p, 657)) // token='except' + (a = _PyPegen_expect_token(p, 655)) // token='except' && (_opt_var = _tmp_220_rule(p), !p->error_indicator) // [expression ['as' NAME]] && @@ -23340,7 +23242,7 @@ invalid_except_stmt_rule(Parser *p) expr_ty a; expr_ty expressions_var; if ( - (_keyword = _PyPegen_expect_token(p, 657)) // token='except' + (_keyword = _PyPegen_expect_token(p, 655)) // token='except' && (_opt_var = _PyPegen_expect_token(p, 16), !p->error_indicator) // '*'? && @@ -23382,7 +23284,7 @@ invalid_except_stmt_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 657)) // token='except' + (a = _PyPegen_expect_token(p, 655)) // token='except' && (_opt_var = _PyPegen_expect_token(p, 16), !p->error_indicator) // '*'? 
&& @@ -23415,7 +23317,7 @@ invalid_except_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 657)) // token='except' + (a = _PyPegen_expect_token(p, 655)) // token='except' && (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) @@ -23443,7 +23345,7 @@ invalid_except_stmt_rule(Parser *p) void *_tmp_223_var; Token * a; if ( - (a = _PyPegen_expect_token(p, 657)) // token='except' + (a = _PyPegen_expect_token(p, 655)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && @@ -23492,7 +23394,7 @@ invalid_finally_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 653)) // token='finally' + (a = _PyPegen_expect_token(p, 651)) // token='finally' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -23548,7 +23450,7 @@ invalid_except_stmt_indent_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 657)) // token='except' + (a = _PyPegen_expect_token(p, 655)) // token='except' && (expression_var = expression_rule(p)) // expression && @@ -23584,7 +23486,7 @@ invalid_except_stmt_indent_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 657)) // token='except' + (a = _PyPegen_expect_token(p, 655)) // token='except' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -23640,7 +23542,7 @@ invalid_except_star_stmt_indent_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 657)) // token='except' + (a = _PyPegen_expect_token(p, 655)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && @@ -23879,7 +23781,7 @@ invalid_as_pattern_rule(Parser *p) if ( (or_pattern_var = or_pattern_rule(p)) // or_pattern && - (_keyword = _PyPegen_expect_token(p, 660)) // token='as' + (_keyword = _PyPegen_expect_token(p, 658)) // token='as' && (a = _PyPegen_expect_soft_keyword(p, "_")) // soft_keyword='"_"' ) @@ -23909,7 +23811,7 @@ invalid_as_pattern_rule(Parser *p) if ( (or_pattern_var = or_pattern_rule(p)) // or_pattern && - (_keyword = _PyPegen_expect_token(p, 660)) // token='as' + (_keyword = _PyPegen_expect_token(p, 658)) // token='as' && _PyPegen_lookahead_with_name(0, _PyPegen_name_token, p) && @@ -24063,7 +23965,7 @@ invalid_if_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 662)) // token='if' + (_keyword = _PyPegen_expect_token(p, 660)) // token='if' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -24094,7 +23996,7 @@ invalid_if_stmt_rule(Parser *p) expr_ty a_1; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 662)) // token='if' + (a = _PyPegen_expect_token(p, 660)) // token='if' && (a_1 = named_expression_rule(p)) // named_expression && @@ -24149,7 +24051,7 @@ invalid_elif_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 664)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 662)) // token='elif' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -24180,7 +24082,7 @@ invalid_elif_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 664)) // token='elif' + (a = _PyPegen_expect_token(p, 662)) // token='elif' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -24233,7 +24135,7 @@ invalid_else_stmt_rule(Parser *p) Token * a; Token * 
newline_var; if ( - (a = _PyPegen_expect_token(p, 665)) // token='else' + (a = _PyPegen_expect_token(p, 663)) // token='else' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -24286,7 +24188,7 @@ invalid_while_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 667)) // token='while' + (_keyword = _PyPegen_expect_token(p, 665)) // token='while' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -24317,7 +24219,7 @@ invalid_while_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 667)) // token='while' + (a = _PyPegen_expect_token(p, 665)) // token='while' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -24376,13 +24278,13 @@ invalid_for_stmt_rule(Parser *p) expr_ty star_expressions_var; expr_ty star_targets_var; if ( - (_opt_var = _PyPegen_expect_token(p, 676), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 674), !p->error_indicator) // 'async'? && - (_keyword = _PyPegen_expect_token(p, 672)) // token='for' + (_keyword = _PyPegen_expect_token(p, 670)) // token='for' && (star_targets_var = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 673)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 671)) // token='in' && (star_expressions_var = star_expressions_rule(p)) // star_expressions && @@ -24417,13 +24319,13 @@ invalid_for_stmt_rule(Parser *p) expr_ty star_expressions_var; expr_ty star_targets_var; if ( - (_opt_var = _PyPegen_expect_token(p, 676), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 674), !p->error_indicator) // 'async'? && - (a = _PyPegen_expect_token(p, 672)) // token='for' + (a = _PyPegen_expect_token(p, 670)) // token='for' && (star_targets_var = star_targets_rule(p)) // star_targets && - (_keyword = _PyPegen_expect_token(p, 673)) // token='in' + (_keyword = _PyPegen_expect_token(p, 671)) // token='in' && (star_expressions_var = star_expressions_rule(p)) // star_expressions && @@ -24489,9 +24391,9 @@ invalid_def_raw_rule(Parser *p) expr_ty name_var; Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, 676), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 674), !p->error_indicator) // 'async'? && - (a = _PyPegen_expect_token(p, 677)) // token='def' + (a = _PyPegen_expect_token(p, 675)) // token='def' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -24548,9 +24450,9 @@ invalid_def_raw_rule(Parser *p) asdl_stmt_seq* block_var; expr_ty name_var; if ( - (_opt_var = _PyPegen_expect_token(p, 676), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 674), !p->error_indicator) // 'async'? 
&& - (_keyword = _PyPegen_expect_token(p, 677)) // token='def' + (_keyword = _PyPegen_expect_token(p, 675)) // token='def' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -24614,7 +24516,7 @@ invalid_class_def_raw_rule(Parser *p) expr_ty name_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 679)) // token='class' + (_keyword = _PyPegen_expect_token(p, 677)) // token='class' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -24653,7 +24555,7 @@ invalid_class_def_raw_rule(Parser *p) expr_ty name_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 679)) // token='class' + (a = _PyPegen_expect_token(p, 677)) // token='class' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -25428,7 +25330,7 @@ invalid_arithmetic_rule(Parser *p) && (_tmp_243_var = _tmp_243_rule(p)) // '+' | '-' | '*' | '/' | '%' | '//' | '@' && - (a = _PyPegen_expect_token(p, 681)) // token='not' + (a = _PyPegen_expect_token(p, 679)) // token='not' && (b = inversion_rule(p)) // inversion ) @@ -25477,7 +25379,7 @@ invalid_factor_rule(Parser *p) if ( (_tmp_244_var = _tmp_244_rule(p)) // '+' | '-' | '~' && - (a = _PyPegen_expect_token(p, 681)) // token='not' + (a = _PyPegen_expect_token(p, 679)) // token='not' && (b = factor_rule(p)) // factor ) @@ -25948,7 +25850,7 @@ _tmp_7_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'def'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 677)) // token='def' + (_keyword = _PyPegen_expect_token(p, 675)) // token='def' ) { D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'def'")); @@ -25986,7 +25888,7 @@ _tmp_7_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 676)) // token='async' + (_keyword = _PyPegen_expect_token(p, 674)) // token='async' ) { D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'")); @@ -26024,7 +25926,7 @@ _tmp_8_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'class'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 679)) // token='class' + (_keyword = _PyPegen_expect_token(p, 677)) // token='class' ) { D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'class'")); @@ -26081,7 +25983,7 @@ _tmp_9_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'with'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 635)) // token='with' + (_keyword = _PyPegen_expect_token(p, 633)) // token='with' ) { D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'with'")); @@ -26100,7 +26002,7 @@ _tmp_9_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 676)) // token='async' + (_keyword = _PyPegen_expect_token(p, 674)) // token='async' ) { D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'")); @@ -26138,7 +26040,7 @@ _tmp_10_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_10[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 672)) // token='for' + (_keyword = _PyPegen_expect_token(p, 670)) // token='for' ) { D(fprintf(stderr, "%*c+ _tmp_10[%d-%d]: %s succeeded!\n", p->level, ' 
', _mark, p->mark, "'for'")); @@ -26157,7 +26059,7 @@ _tmp_10_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_10[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 676)) // token='async' + (_keyword = _PyPegen_expect_token(p, 674)) // token='async' ) { D(fprintf(stderr, "%*c+ _tmp_10[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'")); @@ -27181,7 +27083,7 @@ _tmp_28_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 660)) // token='as' + (_keyword = _PyPegen_expect_token(p, 658)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) @@ -27344,7 +27246,7 @@ _tmp_31_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 660)) // token='as' + (_keyword = _PyPegen_expect_token(p, 658)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) @@ -29331,7 +29233,7 @@ _tmp_62_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 660)) // token='as' + (_keyword = _PyPegen_expect_token(p, 658)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) @@ -29377,7 +29279,7 @@ _tmp_63_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 660)) // token='as' + (_keyword = _PyPegen_expect_token(p, 658)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) @@ -35264,7 +35166,7 @@ _tmp_159_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'else'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 665)) // token='else' + (_keyword = _PyPegen_expect_token(p, 663)) // token='else' ) { D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else'")); @@ -38800,7 +38702,7 @@ _tmp_214_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_214[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 657)) // token='except' + (_keyword = _PyPegen_expect_token(p, 655)) // token='except' ) { D(fprintf(stderr, "%*c+ _tmp_214[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except'")); @@ -38819,7 +38721,7 @@ _tmp_214_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_214[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'finally'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 653)) // token='finally' + (_keyword = _PyPegen_expect_token(p, 651)) // token='finally' ) { D(fprintf(stderr, "%*c+ _tmp_214[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'finally'")); @@ -38997,7 +38899,7 @@ _tmp_217_rule(Parser *p) Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 660)) // token='as' + (_keyword = _PyPegen_expect_token(p, 658)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) @@ -39219,7 +39121,7 @@ _tmp_221_rule(Parser *p) Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 660)) // token='as' + (_keyword = _PyPegen_expect_token(p, 658)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) @@ -39260,7 +39162,7 @@ _tmp_222_rule(Parser *p) Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 660)) // token='as' + (_keyword = _PyPegen_expect_token(p, 658)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) @@ -39358,7 +39260,7 @@ _tmp_224_rule(Parser *p) Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 660)) // token='as' + (_keyword = _PyPegen_expect_token(p, 658)) // 
token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) @@ -39399,7 +39301,7 @@ _tmp_225_rule(Parser *p) Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 660)) // token='as' + (_keyword = _PyPegen_expect_token(p, 658)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) @@ -41094,7 +40996,7 @@ _tmp_255_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 662)) // token='if' + (_keyword = _PyPegen_expect_token(p, 660)) // token='if' && (z = disjunction_rule(p)) // disjunction ) @@ -41140,7 +41042,7 @@ _tmp_256_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 662)) // token='if' + (_keyword = _PyPegen_expect_token(p, 660)) // token='if' && (z = disjunction_rule(p)) // disjunction ) @@ -41853,7 +41755,7 @@ _tmp_271_rule(Parser *p) Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 660)) // token='as' + (_keyword = _PyPegen_expect_token(p, 658)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) @@ -42112,7 +42014,7 @@ _tmp_276_rule(Parser *p) Token * _keyword; expr_ty star_target_var; if ( - (_keyword = _PyPegen_expect_token(p, 660)) // token='as' + (_keyword = _PyPegen_expect_token(p, 658)) // token='as' && (star_target_var = star_target_rule(p)) // star_target ) @@ -42153,7 +42055,7 @@ _tmp_277_rule(Parser *p) Token * _keyword; expr_ty star_target_var; if ( - (_keyword = _PyPegen_expect_token(p, 660)) // token='as' + (_keyword = _PyPegen_expect_token(p, 658)) // token='as' && (star_target_var = star_target_rule(p)) // star_target ) @@ -42194,7 +42096,7 @@ _tmp_278_rule(Parser *p) Token * _keyword; expr_ty star_target_var; if ( - (_keyword = _PyPegen_expect_token(p, 660)) // token='as' + (_keyword = _PyPegen_expect_token(p, 658)) // token='as' && (star_target_var = star_target_rule(p)) // star_target ) @@ -42235,7 +42137,7 @@ _tmp_279_rule(Parser *p) Token * _keyword; expr_ty star_target_var; if ( - (_keyword = _PyPegen_expect_token(p, 660)) // token='as' + (_keyword = _PyPegen_expect_token(p, 658)) // token='as' && (star_target_var = star_target_rule(p)) // star_target ) diff --git a/Parser/pegen.c b/Parser/pegen.c index 3d3e64559403b1..2955eab2dac7c4 100644 --- a/Parser/pegen.c +++ b/Parser/pegen.c @@ -18,6 +18,31 @@ _PyPegen_interactive_exit(Parser *p) return NULL; } +Py_ssize_t +_PyPegen_byte_offset_to_character_offset_line(PyObject *line, Py_ssize_t col_offset, Py_ssize_t end_col_offset) +{ + const char *data = PyUnicode_AsUTF8(line); + + Py_ssize_t len = 0; + while (col_offset < end_col_offset) { + Py_UCS4 ch = data[col_offset]; + if (ch < 0x80) { + col_offset += 1; + } else if ((ch & 0xe0) == 0xc0) { + col_offset += 2; + } else if ((ch & 0xf0) == 0xe0) { + col_offset += 3; + } else if ((ch & 0xf8) == 0xf0) { + col_offset += 4; + } else { + PyErr_SetString(PyExc_ValueError, "Invalid UTF-8 sequence"); + return -1; + } + len++; + } + return len; +} + Py_ssize_t _PyPegen_byte_offset_to_character_offset_raw(const char* str, Py_ssize_t col_offset) { diff --git a/Parser/pegen.h b/Parser/pegen.h index 57b45a54c36c57..32c64e7774b878 100644 --- a/Parser/pegen.h +++ b/Parser/pegen.h @@ -148,6 +148,7 @@ int _PyPegen_fill_token(Parser *p); expr_ty _PyPegen_name_token(Parser *p); expr_ty _PyPegen_number_token(Parser *p); void *_PyPegen_string_token(Parser *p); +Py_ssize_t _PyPegen_byte_offset_to_character_offset_line(PyObject *line, Py_ssize_t col_offset, Py_ssize_t end_col_offset); Py_ssize_t 
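The new _PyPegen_byte_offset_to_character_offset_line helper above counts the characters between two byte offsets of a line by stepping over UTF-8 lead bytes rather than decoding the whole line. A rough Python analogue of that walk, for illustration only (same branch structure, not the C code):

    def char_count_between(line_bytes: bytes, start: int, end: int) -> int:
        # Advance by the length encoded in each UTF-8 lead byte and count how
        # many code points begin inside [start, end).
        count, i = 0, start
        while i < end:
            b = line_bytes[i]
            if b < 0x80:
                i += 1
            elif b & 0xE0 == 0xC0:
                i += 2
            elif b & 0xF0 == 0xE0:
                i += 3
            elif b & 0xF8 == 0xF0:
                i += 4
            else:
                raise ValueError("invalid UTF-8 lead byte")
            count += 1
        return count

    assert char_count_between("héllo".encode("utf-8"), 0, 6) == 5  # 6 bytes, 5 chars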
_PyPegen_byte_offset_to_character_offset(PyObject *line, Py_ssize_t col_offset); Py_ssize_t _PyPegen_byte_offset_to_character_offset_raw(const char*, Py_ssize_t col_offset); diff --git a/Python/Python-ast.c b/Python/Python-ast.c index 4956d04f719de9..7aa1c5119d8f28 100644 --- a/Python/Python-ast.c +++ b/Python/Python-ast.c @@ -5221,6 +5221,13 @@ ast_type_init(PyObject *self, PyObject *args, PyObject *kw) goto set_remaining_cleanup; } } + else if (type == state->expr_context_type) { + // special case for expr_context: default to Load() + res = PyObject_SetAttr(self, name, state->Load_singleton); + if (res < 0) { + goto set_remaining_cleanup; + } + } else { // simple field (e.g., identifier) if (PyErr_WarnFormat( diff --git a/Python/Python-tokenize.c b/Python/Python-tokenize.c index 41e8107e205b46..09fad18b5b4df7 100644 --- a/Python/Python-tokenize.c +++ b/Python/Python-tokenize.c @@ -32,6 +32,11 @@ typedef struct { PyObject_HEAD struct tok_state *tok; int done; + + /* Needed to cache line for performance */ + PyObject *last_line; + Py_ssize_t last_lineno; + Py_ssize_t byte_col_offset_diff; } tokenizeriterobject; /*[clinic input] @@ -68,6 +73,11 @@ tokenizeriter_new_impl(PyTypeObject *type, PyObject *readline, self->tok->tok_extra_tokens = 1; } self->done = 0; + + self->last_line = NULL; + self->byte_col_offset_diff = 0; + self->last_lineno = 0; + return (PyObject *)self; } @@ -210,7 +220,18 @@ tokenizeriter_next(tokenizeriterobject *it) if (size >= 1 && it->tok->implicit_newline) { size -= 1; } - line = PyUnicode_DecodeUTF8(line_start, size, "replace"); + + if (it->tok->lineno != it->last_lineno) { + // Line has changed since last token, so we fetch the new line and cache it + // in the iter object. + Py_XDECREF(it->last_line); + line = PyUnicode_DecodeUTF8(line_start, size, "replace"); + it->last_line = line; + it->byte_col_offset_diff = 0; + } else { + // Line hasn't changed so we reuse the cached one. + line = it->last_line; + } } if (line == NULL) { Py_DECREF(str); @@ -219,13 +240,28 @@ tokenizeriter_next(tokenizeriterobject *it) Py_ssize_t lineno = ISSTRINGLIT(type) ? it->tok->first_lineno : it->tok->lineno; Py_ssize_t end_lineno = it->tok->lineno; + it->last_lineno = lineno; + Py_ssize_t col_offset = -1; Py_ssize_t end_col_offset = -1; + Py_ssize_t byte_offset = -1; if (token.start != NULL && token.start >= line_start) { - col_offset = _PyPegen_byte_offset_to_character_offset(line, token.start - line_start); + byte_offset = token.start - line_start; + col_offset = byte_offset - it->byte_col_offset_diff; } if (token.end != NULL && token.end >= it->tok->line_start) { - end_col_offset = _PyPegen_byte_offset_to_character_offset_raw(it->tok->line_start, token.end - it->tok->line_start); + Py_ssize_t end_byte_offset = token.end - it->tok->line_start; + if (lineno == end_lineno) { + // If the whole token is at the same line, we can just use the token.start + // buffer for figuring out the new column offset, since using line is not + // performant for very long lines. 
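The asdl_c.py and Python-ast.c hunks above add a special case so that an AST node built without an explicit expr_context field gets Load() instead of a missing-field warning. A small illustration of what that is expected to look like from Python (behaviour of a patched build; older releases leave ctx unset):

    import ast

    # With the expr_context default in place, omitting ctx should yield Load().
    node = ast.Name(id="x")
    print(type(getattr(node, "ctx", None)))   # expected on a patched build: <class 'ast.Load'>

    # An explicit context still wins, as before.
    store = ast.Name(id="x", ctx=ast.Store())
    print(type(store.ctx))                    # <class 'ast.Store'>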
+ Py_ssize_t token_col_offset = _PyPegen_byte_offset_to_character_offset_line(line, byte_offset, end_byte_offset); + end_col_offset = col_offset + token_col_offset; + it->byte_col_offset_diff += token.end - token.start - token_col_offset; + } else { + end_col_offset = _PyPegen_byte_offset_to_character_offset_raw(it->tok->line_start, end_byte_offset); + it->byte_col_offset_diff += end_byte_offset - end_col_offset; + } } if (it->tok->tok_extra_tokens) { @@ -262,7 +298,7 @@ tokenizeriter_next(tokenizeriterobject *it) } } - result = Py_BuildValue("(iN(nn)(nn)N)", type, str, lineno, col_offset, end_lineno, end_col_offset, line); + result = Py_BuildValue("(iN(nn)(nn)O)", type, str, lineno, col_offset, end_lineno, end_col_offset, line); exit: _PyToken_Free(&token); if (type == ENDMARKER) { @@ -275,6 +311,7 @@ static void tokenizeriter_dealloc(tokenizeriterobject *it) { PyTypeObject *tp = Py_TYPE(it); + Py_XDECREF(it->last_line); _PyTokenizer_Free(it->tok); tp->tp_free(it); Py_DECREF(tp); diff --git a/Python/_warnings.c b/Python/_warnings.c index 793cbc657f3184..17404d33c1cc9b 100644 --- a/Python/_warnings.c +++ b/Python/_warnings.c @@ -569,10 +569,9 @@ call_show_warning(PyThreadState *tstate, PyObject *category, PyObject *show_fn, *msg, *res, *warnmsg_cls = NULL; PyInterpreterState *interp = tstate->interp; - /* If the source parameter is set, try to get the Python implementation. - The Python implementation is able to log the traceback where the source + /* The Python implementation is able to log the traceback where the source was allocated, whereas the C implementation doesn't. */ - show_fn = GET_WARNINGS_ATTR(interp, _showwarnmsg, source != NULL); + show_fn = GET_WARNINGS_ATTR(interp, _showwarnmsg, 1); if (show_fn == NULL) { if (PyErr_Occurred()) return -1; diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c index d192d5be751cfc..351fc8462a5ddc 100644 --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -870,15 +870,15 @@ builtin_compile_impl(PyObject *module, PyObject *source, PyObject *filename, // gh-118527: Disable immortalization of code constants for explicit // compile() calls to get consistent frozen outputs between the default // and free-threaded builds. 
+ // Subtract two to suppress immortalization (so that 1 -> -1) PyInterpreterState *interp = _PyInterpreterState_GET(); - int old_value = interp->gc.immortalize.enable_on_thread_created; - interp->gc.immortalize.enable_on_thread_created = 0; + _Py_atomic_add_int(&interp->gc.immortalize, -2); #endif result = Py_CompileStringObject(str, filename, start[compile_mode], &cf, optimize); #ifdef Py_GIL_DISABLED - interp->gc.immortalize.enable_on_thread_created = old_value; + _Py_atomic_add_int(&interp->gc.immortalize, 2); #endif Py_XDECREF(source_copy); diff --git a/Python/bytecodes.c b/Python/bytecodes.c index 55eda9711dea1f..0dc60bbbb6a579 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -292,11 +292,9 @@ dummy_func( /* Need to create a fake StopIteration error here, * to conform to PEP 380 */ if (PyGen_Check(receiver)) { - PyErr_SetObject(PyExc_StopIteration, value); - if (monitor_stop_iteration(tstate, frame, this_instr)) { + if (monitor_stop_iteration(tstate, frame, this_instr, value)) { ERROR_NO_POP(); } - PyErr_SetRaisedException(NULL); } DECREF_INPUTS(); } @@ -307,11 +305,9 @@ dummy_func( tier1 inst(INSTRUMENTED_END_SEND, (receiver, value -- value)) { if (PyGen_Check(receiver) || PyCoro_CheckExact(receiver)) { - PyErr_SetObject(PyExc_StopIteration, value); - if (monitor_stop_iteration(tstate, frame, this_instr)) { + if (monitor_stop_iteration(tstate, frame, this_instr, value)) { ERROR_NO_POP(); } - PyErr_SetRaisedException(NULL); } Py_DECREF(receiver); } @@ -1379,18 +1375,35 @@ dummy_func( ERROR_NO_POP(); } if (v == NULL) { - if (PyDict_GetItemRef(GLOBALS(), name, &v) < 0) { - ERROR_NO_POP(); - } - if (v == NULL) { - if (PyMapping_GetOptionalItem(BUILTINS(), name, &v) < 0) { + if (PyDict_CheckExact(GLOBALS()) + && PyDict_CheckExact(BUILTINS())) + { + v = _PyDict_LoadGlobal((PyDictObject *)GLOBALS(), + (PyDictObject *)BUILTINS(), + name); + if (v == NULL) { + if (!_PyErr_Occurred(tstate)) { + /* _PyDict_LoadGlobal() returns NULL without raising + * an exception if the key doesn't exist */ + _PyEval_FormatExcCheckArg(tstate, PyExc_NameError, + NAME_ERROR_MSG, name); + } ERROR_NO_POP(); } + } + else { + /* Slow-path if globals or builtins is not a dict */ + /* namespace 1: globals */ + ERROR_IF(PyMapping_GetOptionalItem(GLOBALS(), name, &v) < 0, error); if (v == NULL) { - _PyEval_FormatExcCheckArg( - tstate, PyExc_NameError, - NAME_ERROR_MSG, name); - ERROR_NO_POP(); + /* namespace 2: builtins */ + ERROR_IF(PyMapping_GetOptionalItem(BUILTINS(), name, &v) < 0, error); + if (v == NULL) { + _PyEval_FormatExcCheckArg( + tstate, PyExc_NameError, + NAME_ERROR_MSG, name); + ERROR_IF(true, error); + } } } } @@ -1547,7 +1560,7 @@ dummy_func( inst(MAKE_CELL, (--)) { // "initial" is probably NULL but not if it's an arg (or set - // via PyFrame_LocalsToFast() before MAKE_CELL has run). + // via the f_locals proxy before MAKE_CELL has run). 
PyObject *initial = GETLOCAL(oparg); PyObject *cell = PyCell_New(initial); if (cell == NULL) { diff --git a/Python/ceval.c b/Python/ceval.c index 128e0417a9fd63..324d062fe9bb43 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -231,7 +231,8 @@ static void monitor_reraise(PyThreadState *tstate, _Py_CODEUNIT *instr); static int monitor_stop_iteration(PyThreadState *tstate, _PyInterpreterFrame *frame, - _Py_CODEUNIT *instr); + _Py_CODEUNIT *instr, + PyObject *value); static void monitor_unwind(PyThreadState *tstate, _PyInterpreterFrame *frame, _Py_CODEUNIT *instr); @@ -2215,12 +2216,19 @@ monitor_reraise(PyThreadState *tstate, _PyInterpreterFrame *frame, static int monitor_stop_iteration(PyThreadState *tstate, _PyInterpreterFrame *frame, - _Py_CODEUNIT *instr) + _Py_CODEUNIT *instr, PyObject *value) { if (no_tools_for_local_event(tstate, frame, PY_MONITORING_EVENT_STOP_ITERATION)) { return 0; } - return do_monitor_exc(tstate, frame, instr, PY_MONITORING_EVENT_STOP_ITERATION); + assert(!PyErr_Occurred()); + PyErr_SetObject(PyExc_StopIteration, value); + int res = do_monitor_exc(tstate, frame, instr, PY_MONITORING_EVENT_STOP_ITERATION); + if (res < 0) { + return res; + } + PyErr_SetRaisedException(NULL); + return 0; } static void diff --git a/Python/ceval_gil.c b/Python/ceval_gil.c index 7a54c185303cd1..5617504a495686 100644 --- a/Python/ceval_gil.c +++ b/Python/ceval_gil.c @@ -205,32 +205,39 @@ static void recreate_gil(struct _gil_runtime_state *gil) } #endif -static void -drop_gil_impl(struct _gil_runtime_state *gil) +static inline void +drop_gil_impl(PyThreadState *tstate, struct _gil_runtime_state *gil) { MUTEX_LOCK(gil->mutex); _Py_ANNOTATE_RWLOCK_RELEASED(&gil->locked, /*is_write=*/1); _Py_atomic_store_int_relaxed(&gil->locked, 0); + if (tstate != NULL) { + tstate->_status.holds_gil = 0; + } COND_SIGNAL(gil->cond); MUTEX_UNLOCK(gil->mutex); } static void -drop_gil(PyInterpreterState *interp, PyThreadState *tstate) +drop_gil(PyInterpreterState *interp, PyThreadState *tstate, int final_release) { struct _ceval_state *ceval = &interp->ceval; - /* If tstate is NULL, the caller is indicating that we're releasing + /* If final_release is true, the caller is indicating that we're releasing the GIL for the last time in this thread. This is particularly relevant when the current thread state is finalizing or its interpreter is finalizing (either may be in an inconsistent state). In that case the current thread will definitely never try to acquire the GIL again. */ // XXX It may be more correct to check tstate->_status.finalizing. - // XXX assert(tstate == NULL || !tstate->_status.cleared); + // XXX assert(final_release || !tstate->_status.cleared); + assert(final_release || tstate != NULL); struct _gil_runtime_state *gil = ceval->gil; #ifdef Py_GIL_DISABLED - if (!_Py_atomic_load_int_relaxed(&gil->enabled)) { + // Check if we have the GIL before dropping it. tstate will be NULL if + // take_gil() detected that this thread has been destroyed, in which case + // we know we have the GIL. + if (tstate != NULL && !tstate->_status.holds_gil) { return; } #endif @@ -238,26 +245,23 @@ drop_gil(PyInterpreterState *interp, PyThreadState *tstate) Py_FatalError("drop_gil: GIL is not locked"); } - /* tstate is allowed to be NULL (early interpreter init) */ - if (tstate != NULL) { + if (!final_release) { /* Sub-interpreter support: threads might have been switched under our feet using PyThreadState_Swap(). Fix the GIL last holder variable so that our heuristics work. 
*/ _Py_atomic_store_ptr_relaxed(&gil->last_holder, tstate); } - drop_gil_impl(gil); + drop_gil_impl(tstate, gil); #ifdef FORCE_SWITCHING - /* We check tstate first in case we might be releasing the GIL for - the last time in this thread. In that case there's a possible - race with tstate->interp getting deleted after gil->mutex is - unlocked and before the following code runs, leading to a crash. - We can use (tstate == NULL) to indicate the thread is done with - the GIL, and that's the only time we might delete the - interpreter, so checking tstate first prevents the crash. - See https://github.com/python/cpython/issues/104341. */ - if (tstate != NULL && + /* We might be releasing the GIL for the last time in this thread. In that + case there's a possible race with tstate->interp getting deleted after + gil->mutex is unlocked and before the following code runs, leading to a + crash. We can use final_release to indicate the thread is done with the + GIL, and that's the only time we might delete the interpreter. See + https://github.com/python/cpython/issues/104341. */ + if (!final_release && _Py_eval_breaker_bit_is_set(tstate, _PY_GIL_DROP_REQUEST_BIT)) { MUTEX_LOCK(gil->switch_mutex); /* Not switched yet => wait */ @@ -284,7 +288,7 @@ drop_gil(PyInterpreterState *interp, PyThreadState *tstate) tstate must be non-NULL. Returns 1 if the GIL was acquired, or 0 if not. */ -static int +static void take_gil(PyThreadState *tstate) { int err = errno; @@ -309,7 +313,7 @@ take_gil(PyThreadState *tstate) struct _gil_runtime_state *gil = interp->ceval.gil; #ifdef Py_GIL_DISABLED if (!_Py_atomic_load_int_relaxed(&gil->enabled)) { - return 0; + return; } #endif @@ -358,10 +362,10 @@ take_gil(PyThreadState *tstate) if (!_Py_atomic_load_int_relaxed(&gil->enabled)) { // Another thread disabled the GIL between our check above and // now. Don't take the GIL, signal any other waiting threads, and - // return 0. + // return. COND_SIGNAL(gil->cond); MUTEX_UNLOCK(gil->mutex); - return 0; + return; } #endif @@ -393,20 +397,21 @@ take_gil(PyThreadState *tstate) in take_gil() while the main thread called wait_for_thread_shutdown() from Py_Finalize(). */ MUTEX_UNLOCK(gil->mutex); - /* Passing NULL to drop_gil() indicates that this thread is about to - terminate and will never hold the GIL again. */ - drop_gil(interp, NULL); + /* tstate could be a dangling pointer, so don't pass it to + drop_gil(). */ + drop_gil(interp, NULL, 1); PyThread_exit_thread(); } assert(_PyThreadState_CheckConsistency(tstate)); + tstate->_status.holds_gil = 1; _Py_unset_eval_breaker_bit(tstate, _PY_GIL_DROP_REQUEST_BIT); update_eval_breaker_for_thread(interp, tstate); MUTEX_UNLOCK(gil->mutex); errno = err; - return 1; + return; } void _PyEval_SetSwitchInterval(unsigned long microseconds) @@ -451,10 +456,17 @@ PyEval_ThreadsInitialized(void) static inline int current_thread_holds_gil(struct _gil_runtime_state *gil, PyThreadState *tstate) { - if (((PyThreadState*)_Py_atomic_load_ptr_relaxed(&gil->last_holder)) != tstate) { - return 0; - } - return _Py_atomic_load_int_relaxed(&gil->locked); + int holds_gil = tstate->_status.holds_gil; + + // holds_gil is the source of truth; check that last_holder and gil->locked + // are consistent with it. 
+ int locked = _Py_atomic_load_int_relaxed(&gil->locked); + int is_last_holder = + ((PyThreadState*)_Py_atomic_load_ptr_relaxed(&gil->last_holder)) == tstate; + assert(!holds_gil || locked); + assert(!holds_gil || is_last_holder); + + return holds_gil; } #endif @@ -563,23 +575,24 @@ PyEval_ReleaseLock(void) /* This function must succeed when the current thread state is NULL. We therefore avoid PyThreadState_Get() which dumps a fatal error in debug mode. */ - drop_gil(tstate->interp, tstate); + drop_gil(tstate->interp, tstate, 0); } -int +void _PyEval_AcquireLock(PyThreadState *tstate) { _Py_EnsureTstateNotNULL(tstate); - return take_gil(tstate); + take_gil(tstate); } void -_PyEval_ReleaseLock(PyInterpreterState *interp, PyThreadState *tstate) +_PyEval_ReleaseLock(PyInterpreterState *interp, + PyThreadState *tstate, + int final_release) { - /* If tstate is NULL then we do not expect the current thread - to acquire the GIL ever again. */ - assert(tstate == NULL || tstate->interp == interp); - drop_gil(interp, tstate); + assert(tstate != NULL); + assert(tstate->interp == interp); + drop_gil(interp, tstate, final_release); } void @@ -1136,7 +1149,12 @@ _PyEval_DisableGIL(PyThreadState *tstate) // // Drop the GIL, which will wake up any threads waiting in take_gil() // and let them resume execution without the GIL. - drop_gil_impl(gil); + drop_gil_impl(tstate, gil); + + // If another thread asked us to drop the GIL, they should be + // free-threading by now. Remove any such request so we have a clean + // slate if/when the GIL is enabled again. + _Py_unset_eval_breaker_bit(tstate, _PY_GIL_DROP_REQUEST_BIT); return 1; } return 0; diff --git a/Python/compile.c b/Python/compile.c index 79f3baadca6b4a..949b9642c75d22 100644 --- a/Python/compile.c +++ b/Python/compile.c @@ -1001,7 +1001,7 @@ static int compiler_addop_name(struct compiler_unit *u, location loc, int opcode, PyObject *dict, PyObject *o) { - PyObject *mangled = _Py_Mangle(u->u_private, o); + PyObject *mangled = _Py_MaybeMangle(u->u_private, u->u_ste, o); if (!mangled) { return ERROR; } @@ -1873,7 +1873,7 @@ compiler_visit_kwonlydefaults(struct compiler *c, location loc, arg_ty arg = asdl_seq_GET(kwonlyargs, i); expr_ty default_ = asdl_seq_GET(kw_defaults, i); if (default_) { - PyObject *mangled = _Py_Mangle(c->u->u_private, arg->arg); + PyObject *mangled = _Py_MaybeMangle(c->u->u_private, c->u->u_ste, arg->arg); if (!mangled) { goto error; } @@ -1930,7 +1930,7 @@ compiler_visit_argannotation(struct compiler *c, identifier id, if (!annotation) { return SUCCESS; } - PyObject *mangled = _Py_Mangle(c->u->u_private, id); + PyObject *mangled = _Py_MaybeMangle(c->u->u_private, c->u->u_ste, id); if (!mangled) { return ERROR; } @@ -2625,7 +2625,6 @@ compiler_class(struct compiler *c, stmt_ty s) asdl_type_param_seq *type_params = s->v.ClassDef.type_params; int is_generic = asdl_seq_LEN(type_params) > 0; if (is_generic) { - Py_XSETREF(c->u->u_private, Py_NewRef(s->v.ClassDef.name)); PyObject *type_params_name = PyUnicode_FromFormat("", s->v.ClassDef.name); if (!type_params_name) { @@ -2637,6 +2636,7 @@ compiler_class(struct compiler *c, stmt_ty s) return ERROR; } Py_DECREF(type_params_name); + Py_XSETREF(c->u->u_private, Py_NewRef(s->v.ClassDef.name)); RETURN_IF_ERROR_IN_SCOPE(c, compiler_type_params(c, type_params)); _Py_DECLARE_STR(type_params, ".type_params"); RETURN_IF_ERROR_IN_SCOPE(c, compiler_nameop(c, loc, &_Py_STR(type_params), Store)); @@ -4203,7 +4203,7 @@ compiler_nameop(struct compiler *c, location loc, return ERROR; } - mangled = 
_Py_Mangle(c->u->u_private, name); + mangled = _Py_MaybeMangle(c->u->u_private, c->u->u_ste, name); if (!mangled) { return ERROR; } @@ -6512,7 +6512,7 @@ compiler_annassign(struct compiler *c, stmt_ty s) VISIT(c, expr, s->v.AnnAssign.annotation); } ADDOP_NAME(c, loc, LOAD_NAME, &_Py_ID(__annotations__), names); - mangled = _Py_Mangle(c->u->u_private, targ->v.Name.id); + mangled = _Py_MaybeMangle(c->u->u_private, c->u->u_ste, targ->v.Name.id); ADDOP_LOAD_CONST_NEW(c, loc, mangled); ADDOP(c, loc, STORE_SUBSCR); } diff --git a/Python/crossinterp_exceptions.h b/Python/crossinterp_exceptions.h index 6ecc10c7955fd8..278511da615c75 100644 --- a/Python/crossinterp_exceptions.h +++ b/Python/crossinterp_exceptions.h @@ -90,6 +90,6 @@ static void fini_exceptions(PyInterpreterState *interp) { // Likewise with _fini_not_shareable_error_type(). - _PyStaticType_Dealloc(interp, &_PyExc_InterpreterNotFoundError); - _PyStaticType_Dealloc(interp, &_PyExc_InterpreterError); + _PyStaticType_FiniBuiltin(interp, &_PyExc_InterpreterNotFoundError); + _PyStaticType_FiniBuiltin(interp, &_PyExc_InterpreterError); } diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h index 347a1e677a0832..4e0f73f7e23a0a 100644 --- a/Python/executor_cases.c.h +++ b/Python/executor_cases.c.h @@ -1394,35 +1394,7 @@ break; } - case _LOAD_FROM_DICT_OR_GLOBALS: { - PyObject *mod_or_class_dict; - PyObject *v; - oparg = CURRENT_OPARG(); - mod_or_class_dict = stack_pointer[-1]; - PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); - if (PyMapping_GetOptionalItem(mod_or_class_dict, name, &v) < 0) { - JUMP_TO_ERROR(); - } - if (v == NULL) { - if (PyDict_GetItemRef(GLOBALS(), name, &v) < 0) { - JUMP_TO_ERROR(); - } - if (v == NULL) { - if (PyMapping_GetOptionalItem(BUILTINS(), name, &v) < 0) { - JUMP_TO_ERROR(); - } - if (v == NULL) { - _PyEval_FormatExcCheckArg( - tstate, PyExc_NameError, - NAME_ERROR_MSG, name); - JUMP_TO_ERROR(); - } - } - } - Py_DECREF(mod_or_class_dict); - stack_pointer[-1] = v; - break; - } + /* _LOAD_FROM_DICT_OR_GLOBALS is not a viable micro-op for tier 2 because it has both popping and not-popping errors */ /* _LOAD_NAME is not a viable micro-op for tier 2 because it has both popping and not-popping errors */ @@ -1558,7 +1530,7 @@ case _MAKE_CELL: { oparg = CURRENT_OPARG(); // "initial" is probably NULL but not if it's an arg (or set - // via PyFrame_LocalsToFast() before MAKE_CELL has run). + // via the f_locals proxy before MAKE_CELL has run). 
PyObject *initial = GETLOCAL(oparg); PyObject *cell = PyCell_New(initial); if (cell == NULL) { diff --git a/Python/gc_free_threading.c b/Python/gc_free_threading.c index ee006bb4aa12b7..f19362c9573812 100644 --- a/Python/gc_free_threading.c +++ b/Python/gc_free_threading.c @@ -86,7 +86,7 @@ worklist_pop(struct worklist *worklist) PyObject *op = (PyObject *)worklist->head; if (op != NULL) { worklist->head = op->ob_tid; - op->ob_tid = 0; + _Py_atomic_store_uintptr_relaxed(&op->ob_tid, 0); } return op; } @@ -189,6 +189,7 @@ merge_refcount(PyObject *op, Py_ssize_t extra) static void gc_restore_tid(PyObject *op) { + assert(_PyInterpreterState_GET()->stoptheworld.world_stopped); mi_segment_t *segment = _mi_ptr_segment(op); if (_Py_REF_IS_MERGED(op->ob_ref_shared)) { op->ob_tid = 0; @@ -251,6 +252,10 @@ gc_visit_heaps_lock_held(PyInterpreterState *interp, mi_block_visit_fun *visitor // visit each thread's heaps for GC objects for (PyThreadState *p = interp->threads.head; p != NULL; p = p->next) { struct _mimalloc_thread_state *m = &((_PyThreadStateImpl *)p)->mimalloc; + if (!_Py_atomic_load_int(&m->initialized)) { + // The thread may not have called tstate_mimalloc_bind() yet. + continue; + } arg->offset = offset_base; if (!mi_heap_visit_blocks(&m->heaps[_Py_MIMALLOC_HEAP_GC], true, @@ -676,7 +681,6 @@ call_weakref_callbacks(struct collection_state *state) Py_DECREF(temp); } - gc_restore_tid(op); Py_DECREF(op); // drop worklist reference } } @@ -703,11 +707,9 @@ _PyGC_Init(PyInterpreterState *interp) { GCState *gcstate = &interp->gc; - if (_Py_IsMainInterpreter(interp)) { - // gh-117783: immortalize objects that would use deferred refcounting - // once the first non-main thread is created. - gcstate->immortalize.enable_on_thread_created = 1; - } + // gh-117783: immortalize objects that would use deferred refcounting + // once the first non-main thread is created (but not in subinterpreters). + gcstate->immortalize = _Py_IsMainInterpreter(interp) ? 
0 : -1; gcstate->garbage = PyList_New(0); if (gcstate->garbage == NULL) { @@ -986,7 +988,6 @@ cleanup_worklist(struct worklist *worklist) { PyObject *op; while ((op = worklist_pop(worklist)) != NULL) { - gc_restore_tid(op); gc_clear_unreachable(op); Py_DECREF(op); } @@ -1809,8 +1810,10 @@ _PyGC_ImmortalizeDeferredObjects(PyInterpreterState *interp) { struct visitor_args args; _PyEval_StopTheWorld(interp); - gc_visit_heaps(interp, &immortalize_visitor, &args); - interp->gc.immortalize.enabled = 1; + if (interp->gc.immortalize == 0) { + gc_visit_heaps(interp, &immortalize_visitor, &args); + interp->gc.immortalize = 1; + } _PyEval_StartTheWorld(interp); } diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index 8b8112209cc78a..ace91a3ca9b624 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -3260,11 +3260,9 @@ /* Need to create a fake StopIteration error here, * to conform to PEP 380 */ if (PyGen_Check(receiver)) { - PyErr_SetObject(PyExc_StopIteration, value); - if (monitor_stop_iteration(tstate, frame, this_instr)) { + if (monitor_stop_iteration(tstate, frame, this_instr, value)) { goto error; } - PyErr_SetRaisedException(NULL); } Py_DECREF(value); stack_pointer += -1; @@ -3281,11 +3279,9 @@ value = stack_pointer[-1]; receiver = stack_pointer[-2]; if (PyGen_Check(receiver) || PyCoro_CheckExact(receiver)) { - PyErr_SetObject(PyExc_StopIteration, value); - if (monitor_stop_iteration(tstate, frame, this_instr)) { + if (monitor_stop_iteration(tstate, frame, this_instr, value)) { goto error; } - PyErr_SetRaisedException(NULL); } Py_DECREF(receiver); stack_pointer[-2] = value; @@ -4385,18 +4381,35 @@ goto error; } if (v == NULL) { - if (PyDict_GetItemRef(GLOBALS(), name, &v) < 0) { - goto error; - } - if (v == NULL) { - if (PyMapping_GetOptionalItem(BUILTINS(), name, &v) < 0) { + if (PyDict_CheckExact(GLOBALS()) + && PyDict_CheckExact(BUILTINS())) + { + v = _PyDict_LoadGlobal((PyDictObject *)GLOBALS(), + (PyDictObject *)BUILTINS(), + name); + if (v == NULL) { + if (!_PyErr_Occurred(tstate)) { + /* _PyDict_LoadGlobal() returns NULL without raising + * an exception if the key doesn't exist */ + _PyEval_FormatExcCheckArg(tstate, PyExc_NameError, + NAME_ERROR_MSG, name); + } goto error; } + } + else { + /* Slow-path if globals or builtins is not a dict */ + /* namespace 1: globals */ + if (PyMapping_GetOptionalItem(GLOBALS(), name, &v) < 0) goto pop_1_error; if (v == NULL) { - _PyEval_FormatExcCheckArg( - tstate, PyExc_NameError, - NAME_ERROR_MSG, name); - goto error; + /* namespace 2: builtins */ + if (PyMapping_GetOptionalItem(BUILTINS(), name, &v) < 0) goto pop_1_error; + if (v == NULL) { + _PyEval_FormatExcCheckArg( + tstate, PyExc_NameError, + NAME_ERROR_MSG, name); + if (true) goto pop_1_error; + } } } } @@ -4751,7 +4764,7 @@ next_instr += 1; INSTRUCTION_STATS(MAKE_CELL); // "initial" is probably NULL but not if it's an arg (or set - // via PyFrame_LocalsToFast() before MAKE_CELL has run). + // via the f_locals proxy before MAKE_CELL has run). 
PyObject *initial = GETLOCAL(oparg); PyObject *cell = PyCell_New(initial); if (cell == NULL) { diff --git a/Python/getargs.c b/Python/getargs.c index 539925e471f54c..f9a836679fe55e 100644 --- a/Python/getargs.c +++ b/Python/getargs.c @@ -7,6 +7,7 @@ #include "pycore_dict.h" // _PyDict_HasOnlyStringKeys() #include "pycore_modsupport.h" // export _PyArg_NoKeywords() #include "pycore_pylifecycle.h" // _PyArg_Fini +#include "pycore_pystate.h" // _Py_IsMainInterpreter() #include "pycore_tuple.h" // _PyTuple_ITEMS() #include "pycore_pyerrors.h" // _Py_CalculateSuggestions() @@ -1947,7 +1948,23 @@ _parser_init(void *arg) int owned; PyObject *kwtuple = parser->kwtuple; if (kwtuple == NULL) { + /* We may temporarily switch to the main interpreter to avoid + * creating a tuple that could outlive its owning interpreter. */ + PyThreadState *save_tstate = NULL; + PyThreadState *temp_tstate = NULL; + if (!_Py_IsMainInterpreter(PyInterpreterState_Get())) { + temp_tstate = PyThreadState_New(_PyInterpreterState_Main()); + if (temp_tstate == NULL) { + return -1; + } + save_tstate = PyThreadState_Swap(temp_tstate); + } kwtuple = new_kwtuple(keywords, len, pos); + if (temp_tstate != NULL) { + PyThreadState_Clear(temp_tstate); + (void)PyThreadState_Swap(save_tstate); + PyThreadState_Delete(temp_tstate); + } if (kwtuple == NULL) { return -1; } @@ -1969,8 +1986,8 @@ _parser_init(void *arg) parser->next = _Py_atomic_load_ptr(&_PyRuntime.getargs.static_parsers); do { // compare-exchange updates parser->next on failure - } while (_Py_atomic_compare_exchange_ptr(&_PyRuntime.getargs.static_parsers, - &parser->next, parser)); + } while (!_Py_atomic_compare_exchange_ptr(&_PyRuntime.getargs.static_parsers, + &parser->next, parser)); return 0; } diff --git a/Python/getversion.c b/Python/getversion.c index 5db836ab4bfd6d..226b2f999a6bfd 100644 --- a/Python/getversion.c +++ b/Python/getversion.c @@ -6,7 +6,7 @@ #include "patchlevel.h" static int initialized = 0; -static char version[250]; +static char version[300]; void _Py_InitVersion(void) { @@ -14,7 +14,12 @@ void _Py_InitVersion(void) return; } initialized = 1; - PyOS_snprintf(version, sizeof(version), "%.80s (%.80s) %.80s", +#ifdef Py_GIL_DISABLED + const char *buildinfo_format = "%.80s experimental free-threading build (%.80s) %.80s"; +#else + const char *buildinfo_format = "%.80s (%.80s) %.80s"; +#endif + PyOS_snprintf(version, sizeof(version), buildinfo_format, PY_VERSION, Py_GetBuildInfo(), Py_GetCompiler()); } diff --git a/Python/import.c b/Python/import.c index ba44477318d473..10ac49fd95fc59 100644 --- a/Python/import.c +++ b/Python/import.c @@ -94,11 +94,7 @@ static struct _inittab *inittab_copy = NULL; (interp)->imports.import_func #define IMPORT_LOCK(interp) \ - (interp)->imports.lock.mutex -#define IMPORT_LOCK_THREAD(interp) \ - (interp)->imports.lock.thread -#define IMPORT_LOCK_LEVEL(interp) \ - (interp)->imports.lock.level + (interp)->imports.lock #define FIND_AND_LOAD(interp) \ (interp)->imports.find_and_load @@ -115,75 +111,15 @@ static struct _inittab *inittab_copy = NULL; void _PyImport_AcquireLock(PyInterpreterState *interp) { - unsigned long me = PyThread_get_thread_ident(); - if (me == PYTHREAD_INVALID_THREAD_ID) - return; /* Too bad */ - if (IMPORT_LOCK(interp) == NULL) { - IMPORT_LOCK(interp) = PyThread_allocate_lock(); - if (IMPORT_LOCK(interp) == NULL) - return; /* Nothing much we can do. 
*/ - } - if (IMPORT_LOCK_THREAD(interp) == me) { - IMPORT_LOCK_LEVEL(interp)++; - return; - } - if (IMPORT_LOCK_THREAD(interp) != PYTHREAD_INVALID_THREAD_ID || - !PyThread_acquire_lock(IMPORT_LOCK(interp), 0)) - { - PyThreadState *tstate = PyEval_SaveThread(); - PyThread_acquire_lock(IMPORT_LOCK(interp), WAIT_LOCK); - PyEval_RestoreThread(tstate); - } - assert(IMPORT_LOCK_LEVEL(interp) == 0); - IMPORT_LOCK_THREAD(interp) = me; - IMPORT_LOCK_LEVEL(interp) = 1; + _PyRecursiveMutex_Lock(&IMPORT_LOCK(interp)); } -int +void _PyImport_ReleaseLock(PyInterpreterState *interp) { - unsigned long me = PyThread_get_thread_ident(); - if (me == PYTHREAD_INVALID_THREAD_ID || IMPORT_LOCK(interp) == NULL) - return 0; /* Too bad */ - if (IMPORT_LOCK_THREAD(interp) != me) - return -1; - IMPORT_LOCK_LEVEL(interp)--; - assert(IMPORT_LOCK_LEVEL(interp) >= 0); - if (IMPORT_LOCK_LEVEL(interp) == 0) { - IMPORT_LOCK_THREAD(interp) = PYTHREAD_INVALID_THREAD_ID; - PyThread_release_lock(IMPORT_LOCK(interp)); - } - return 1; + _PyRecursiveMutex_Unlock(&IMPORT_LOCK(interp)); } -#ifdef HAVE_FORK -/* This function is called from PyOS_AfterFork_Child() to ensure that newly - created child processes do not share locks with the parent. - We now acquire the import lock around fork() calls but on some platforms - (Solaris 9 and earlier? see isue7242) that still left us with problems. */ -PyStatus -_PyImport_ReInitLock(PyInterpreterState *interp) -{ - if (IMPORT_LOCK(interp) != NULL) { - if (_PyThread_at_fork_reinit(&IMPORT_LOCK(interp)) < 0) { - return _PyStatus_ERR("failed to create a new lock"); - } - } - - if (IMPORT_LOCK_LEVEL(interp) > 1) { - /* Forked as a side effect of import */ - unsigned long me = PyThread_get_thread_ident(); - PyThread_acquire_lock(IMPORT_LOCK(interp), WAIT_LOCK); - IMPORT_LOCK_THREAD(interp) = me; - IMPORT_LOCK_LEVEL(interp)--; - } else { - IMPORT_LOCK_THREAD(interp) = PYTHREAD_INVALID_THREAD_ID; - IMPORT_LOCK_LEVEL(interp) = 0; - } - return _PyStatus_OK(); -} -#endif - /***************/ /* sys.modules */ @@ -457,7 +393,6 @@ static Py_ssize_t _get_module_index_from_def(PyModuleDef *def) { Py_ssize_t index = def->m_base.m_index; - assert(index > 0); #ifndef NDEBUG struct extensions_cache_value *cached = _find_cached_def(def); assert(cached == NULL || index == _get_cached_module_index(cached)); @@ -489,13 +424,13 @@ _set_module_index(PyModuleDef *def, Py_ssize_t index) static const char * _modules_by_index_check(PyInterpreterState *interp, Py_ssize_t index) { - if (index == 0) { + if (index <= 0) { return "invalid module index"; } if (MODULES_BY_INDEX(interp) == NULL) { return "Interpreters module-list not accessible."; } - if (index > PyList_GET_SIZE(MODULES_BY_INDEX(interp))) { + if (index >= PyList_GET_SIZE(MODULES_BY_INDEX(interp))) { return "Module index out of bounds."; } return NULL; @@ -2184,7 +2119,7 @@ clear_singlephase_extension(PyInterpreterState *interp, /* Clear data set when the module was initially loaded. */ def->m_base.m_init = NULL; Py_CLEAR(def->m_base.m_copy); - // We leave m_index alone since there's no reason to reset it. + def->m_base.m_index = 0; /* Clear the PyState_*Module() cache entry. 
*/ Py_ssize_t index = _get_cached_module_index(cached); @@ -4112,11 +4047,6 @@ _PyImport_FiniCore(PyInterpreterState *interp) PyErr_FormatUnraisable("Exception ignored on clearing sys.modules"); } - if (IMPORT_LOCK(interp) != NULL) { - PyThread_free_lock(IMPORT_LOCK(interp)); - IMPORT_LOCK(interp) = NULL; - } - _PyImport_ClearCore(interp); } @@ -4249,8 +4179,7 @@ _imp_lock_held_impl(PyObject *module) /*[clinic end generated code: output=8b89384b5e1963fc input=9b088f9b217d9bdf]*/ { PyInterpreterState *interp = _PyInterpreterState_GET(); - return PyBool_FromLong( - IMPORT_LOCK_THREAD(interp) != PYTHREAD_INVALID_THREAD_ID); + return PyBool_FromLong(PyMutex_IsLocked(&IMPORT_LOCK(interp).mutex)); } /*[clinic input] @@ -4284,11 +4213,12 @@ _imp_release_lock_impl(PyObject *module) /*[clinic end generated code: output=7faab6d0be178b0a input=934fb11516dd778b]*/ { PyInterpreterState *interp = _PyInterpreterState_GET(); - if (_PyImport_ReleaseLock(interp) < 0) { + if (!_PyRecursiveMutex_IsLockedByCurrentThread(&IMPORT_LOCK(interp))) { PyErr_SetString(PyExc_RuntimeError, "not holding the import lock"); return NULL; } + _PyImport_ReleaseLock(interp); Py_RETURN_NONE; } diff --git a/Python/instrumentation.c b/Python/instrumentation.c index 77d3489afcfc72..a5211ee5428cf8 100644 --- a/Python/instrumentation.c +++ b/Python/instrumentation.c @@ -893,7 +893,7 @@ remove_per_instruction_tools(PyCodeObject * code, int offset, int tools) static int call_one_instrument( PyInterpreterState *interp, PyThreadState *tstate, PyObject **args, - Py_ssize_t nargsf, int8_t tool, int event) + size_t nargsf, int8_t tool, int event) { assert(0 <= tool && tool < 8); assert(tstate->tracing == 0); @@ -1084,7 +1084,7 @@ call_instrumentation_vector( args[2] = offset_obj; PyInterpreterState *interp = tstate->interp; uint8_t tools = get_tools_for_instruction(code, interp, offset, event); - Py_ssize_t nargsf = nargs | PY_VECTORCALL_ARGUMENTS_OFFSET; + size_t nargsf = (size_t) nargs | PY_VECTORCALL_ARGUMENTS_OFFSET; PyObject **callargs = &args[1]; int err = 0; while (tools) { @@ -2439,13 +2439,15 @@ capi_call_instrumentation(PyMonitoringState *state, PyObject *codelike, int32_t PyErr_SetString(PyExc_ValueError, "offset must be non-negative"); return -1; } - PyObject *offset_obj = PyLong_FromLong(offset); - if (offset_obj == NULL) { - return -1; + if (event != PY_MONITORING_EVENT_LINE) { + PyObject *offset_obj = PyLong_FromLong(offset); + if (offset_obj == NULL) { + return -1; + } + assert(args[2] == NULL); + args[2] = offset_obj; } - assert(args[2] == NULL); - args[2] = offset_obj; - Py_ssize_t nargsf = nargs | PY_VECTORCALL_ARGUMENTS_OFFSET; + size_t nargsf = (size_t) nargs | PY_VECTORCALL_ARGUMENTS_OFFSET; PyObject **callargs = &args[1]; int err = 0; @@ -2565,8 +2567,8 @@ _PyMonitoring_FireLineEvent(PyMonitoringState *state, PyObject *codelike, int32_ if (lno == NULL) { return -1; } - PyObject *args[4] = { NULL, NULL, NULL, lno }; - int res= capi_call_instrumentation(state, codelike, offset, args, 3, + PyObject *args[3] = { NULL, NULL, lno }; + int res= capi_call_instrumentation(state, codelike, offset, args, 2, PY_MONITORING_EVENT_LINE); Py_DECREF(lno); return res; @@ -2622,7 +2624,7 @@ exception_event_teardown(int err, PyObject *exc) { } else { assert(PyErr_Occurred()); - Py_DECREF(exc); + Py_XDECREF(exc); } return err; } @@ -2712,15 +2714,18 @@ _PyMonitoring_FirePyUnwindEvent(PyMonitoringState *state, PyObject *codelike, in } int -_PyMonitoring_FireStopIterationEvent(PyMonitoringState *state, PyObject *codelike, int32_t offset) 
+_PyMonitoring_FireStopIterationEvent(PyMonitoringState *state, PyObject *codelike, int32_t offset, PyObject *value) { int event = PY_MONITORING_EVENT_STOP_ITERATION; assert(state->active); + assert(!PyErr_Occurred()); + PyErr_SetObject(PyExc_StopIteration, value); PyObject *exc; if (exception_event_setup(&exc, event) < 0) { return -1; } PyObject *args[4] = { NULL, NULL, NULL, exc }; int err = capi_call_instrumentation(state, codelike, offset, args, 3, event); - return exception_event_teardown(err, exc); + Py_DECREF(exc); + return exception_event_teardown(err, NULL); } diff --git a/Python/jit.c b/Python/jit.c index 7c316a410dda6a..d0c0d24f4539e2 100644 --- a/Python/jit.c +++ b/Python/jit.c @@ -5,6 +5,7 @@ #include "pycore_abstract.h" #include "pycore_call.h" #include "pycore_ceval.h" +#include "pycore_critical_section.h" #include "pycore_dict.h" #include "pycore_intrinsics.h" #include "pycore_long.h" diff --git a/Python/lock.c b/Python/lock.c index 239e56ad929ea3..555f4c25b9b214 100644 --- a/Python/lock.c +++ b/Python/lock.c @@ -366,6 +366,48 @@ _PyOnceFlag_CallOnceSlow(_PyOnceFlag *flag, _Py_once_fn_t *fn, void *arg) } } +static int +recursive_mutex_is_owned_by(_PyRecursiveMutex *m, PyThread_ident_t tid) +{ + return _Py_atomic_load_ullong_relaxed(&m->thread) == tid; +} + +int +_PyRecursiveMutex_IsLockedByCurrentThread(_PyRecursiveMutex *m) +{ + return recursive_mutex_is_owned_by(m, PyThread_get_thread_ident_ex()); +} + +void +_PyRecursiveMutex_Lock(_PyRecursiveMutex *m) +{ + PyThread_ident_t thread = PyThread_get_thread_ident_ex(); + if (recursive_mutex_is_owned_by(m, thread)) { + m->level++; + return; + } + PyMutex_Lock(&m->mutex); + _Py_atomic_store_ullong_relaxed(&m->thread, thread); + assert(m->level == 0); +} + +void +_PyRecursiveMutex_Unlock(_PyRecursiveMutex *m) +{ + PyThread_ident_t thread = PyThread_get_thread_ident_ex(); + if (!recursive_mutex_is_owned_by(m, thread)) { + Py_FatalError("unlocking a recursive mutex that is not owned by the" + " current thread"); + } + if (m->level > 0) { + m->level--; + return; + } + assert(m->level == 0); + _Py_atomic_store_ullong_relaxed(&m->thread, 0); + PyMutex_Unlock(&m->mutex); +} + #define _Py_WRITE_LOCKED 1 #define _PyRWMutex_READER_SHIFT 2 #define _Py_RWMUTEX_MAX_READERS (UINTPTR_MAX >> _PyRWMutex_READER_SHIFT) diff --git a/Python/optimizer.c b/Python/optimizer.c index 8be2c0ffbd78e9..9dddc43776f21a 100644 --- a/Python/optimizer.c +++ b/Python/optimizer.c @@ -1496,7 +1496,7 @@ address_to_hash(void *ptr) { uintptr_t addr = (uintptr_t)ptr; for (int i = 0; i < SIZEOF_VOID_P; i++) { uhash ^= addr & 255; - uhash *= (uint64_t)_PyHASH_MULTIPLIER; + uhash *= (uint64_t)PyHASH_MULTIPLIER; addr >>= 8; } return uhash; diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h index 2a4efd73d794df..6c2480d4fdfe89 100644 --- a/Python/optimizer_cases.c.h +++ b/Python/optimizer_cases.c.h @@ -815,13 +815,7 @@ break; } - case _LOAD_FROM_DICT_OR_GLOBALS: { - _Py_UopsSymbol *v; - v = sym_new_not_null(ctx); - if (v == NULL) goto out_of_space; - stack_pointer[-1] = v; - break; - } + /* _LOAD_FROM_DICT_OR_GLOBALS is not a viable micro-op for tier 2 */ /* _LOAD_NAME is not a viable micro-op for tier 2 */ diff --git a/Python/parking_lot.c b/Python/parking_lot.c index e2def9e249cd56..841b1d71ea16cb 100644 --- a/Python/parking_lot.c +++ b/Python/parking_lot.c @@ -119,7 +119,7 @@ _PySemaphore_PlatformWait(_PySemaphore *sema, PyTime_t timeout) if (timeout >= 0) { struct timespec ts; -#if defined(CLOCK_MONOTONIC) && defined(HAVE_SEM_CLOCKWAIT) +#if 
defined(CLOCK_MONOTONIC) && defined(HAVE_SEM_CLOCKWAIT) && !defined(_Py_THREAD_SANITIZER) PyTime_t now; // silently ignore error: cannot report error to the caller (void)PyTime_MonotonicRaw(&now); diff --git a/Python/perf_jit_trampoline.c b/Python/perf_jit_trampoline.c index 2a29318b1054a5..0a8945958b4b3c 100644 --- a/Python/perf_jit_trampoline.c +++ b/Python/perf_jit_trampoline.c @@ -174,8 +174,8 @@ static const uint8_t DwarfDataRel = 0x30; typedef struct { unsigned char version; unsigned char eh_frame_ptr_enc; - unsigned char fde_count_enc; - unsigned char table_enc; + unsigned char fde_count_enc; + unsigned char table_enc; int32_t eh_frame_ptr; int32_t eh_fde_count; int32_t from; diff --git a/Python/pyhash.c b/Python/pyhash.c index d508d78092a9e7..5263622ff3126d 100644 --- a/Python/pyhash.c +++ b/Python/pyhash.c @@ -263,12 +263,12 @@ fnv(const void *src, Py_ssize_t len) x ^= (Py_uhash_t) *p << 7; while (blocks--) { PY_UHASH_CPY(block.bytes, p); - x = (_PyHASH_MULTIPLIER * x) ^ block.value; + x = (PyHASH_MULTIPLIER * x) ^ block.value; p += SIZEOF_PY_UHASH_T; } /* add remainder */ for (; remainder > 0; remainder--) - x = (_PyHASH_MULTIPLIER * x) ^ (Py_uhash_t) *p++; + x = (PyHASH_MULTIPLIER * x) ^ (Py_uhash_t) *p++; x ^= (Py_uhash_t) len; x ^= (Py_uhash_t) _Py_HashSecret.fnv.suffix; if (x == (Py_uhash_t) -1) { diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index 67bbbd01ca0c48..cbdf5c1b771fff 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -2119,6 +2119,12 @@ Py_FinalizeEx(void) } #endif /* Py_TRACE_REFS */ +#ifdef WITH_PYMALLOC + if (malloc_stats) { + _PyObject_DebugMallocStats(stderr); + } +#endif + finalize_interp_delete(tstate->interp); #ifdef Py_REF_DEBUG @@ -2129,12 +2135,6 @@ Py_FinalizeEx(void) #endif _Py_FinalizeAllocatedBlocks(runtime); -#ifdef WITH_PYMALLOC - if (malloc_stats) { - _PyObject_DebugMallocStats(stderr); - } -#endif - call_ll_exitfuncs(runtime); _PyRuntime_Finalize(); diff --git a/Python/pystate.c b/Python/pystate.c index b1e085bb806915..e1a95907b57d20 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -1038,6 +1038,17 @@ _PyInterpreterState_DeleteExceptMain(_PyRuntimeState *runtime) } #endif +static inline void +set_main_thread(PyInterpreterState *interp, PyThreadState *tstate) +{ + _Py_atomic_store_ptr_relaxed(&interp->threads.main, tstate); +} + +static inline PyThreadState * +get_main_thread(PyInterpreterState *interp) +{ + return _Py_atomic_load_ptr_relaxed(&interp->threads.main); +} int _PyInterpreterState_SetRunningMain(PyInterpreterState *interp) @@ -1052,21 +1063,22 @@ _PyInterpreterState_SetRunningMain(PyInterpreterState *interp) "current tstate has wrong interpreter"); return -1; } - interp->threads.main = tstate; + set_main_thread(interp, tstate); + return 0; } void _PyInterpreterState_SetNotRunningMain(PyInterpreterState *interp) { - assert(interp->threads.main == current_fast_get()); - interp->threads.main = NULL; + assert(get_main_thread(interp) == current_fast_get()); + set_main_thread(interp, NULL); } int _PyInterpreterState_IsRunningMain(PyInterpreterState *interp) { - if (interp->threads.main != NULL) { + if (get_main_thread(interp) != NULL) { return 1; } // Embedders might not know to call _PyInterpreterState_SetRunningMain(), @@ -1082,18 +1094,15 @@ int _PyThreadState_IsRunningMain(PyThreadState *tstate) { PyInterpreterState *interp = tstate->interp; - if (interp->threads.main != NULL) { - return tstate == interp->threads.main; - } // See the note in _PyInterpreterState_IsRunningMain() about // possible false 
negatives here for embedders. - return 0; + return get_main_thread(interp) == tstate; } int _PyInterpreterState_FailIfRunningMain(PyInterpreterState *interp) { - if (interp->threads.main != NULL) { + if (get_main_thread(interp) != NULL) { PyErr_SetString(PyExc_InterpreterError, "interpreter already running"); return -1; @@ -1105,8 +1114,8 @@ void _PyInterpreterState_ReinitRunningMain(PyThreadState *tstate) { PyInterpreterState *interp = tstate->interp; - if (interp->threads.main != tstate) { - interp->threads.main = NULL; + if (get_main_thread(interp) != tstate) { + set_main_thread(interp, NULL); } } @@ -1574,9 +1583,7 @@ new_threadstate(PyInterpreterState *interp, int whence) } else { #ifdef Py_GIL_DISABLED - if (interp->gc.immortalize.enable_on_thread_created && - !interp->gc.immortalize.enabled) - { + if (_Py_atomic_load_int(&interp->gc.immortalize) == 0) { // Immortalize objects marked as using deferred reference counting // the first time a non-main thread is created. _PyGC_ImmortalizeDeferredObjects(interp); @@ -1742,7 +1749,7 @@ decrement_stoptheworld_countdown(struct _stoptheworld_state *stw); /* Common code for PyThreadState_Delete() and PyThreadState_DeleteCurrent() */ static void -tstate_delete_common(PyThreadState *tstate) +tstate_delete_common(PyThreadState *tstate, int release_gil) { assert(tstate->_status.cleared && !tstate->_status.finalized); tstate_verify_not_active(tstate); @@ -1784,10 +1791,6 @@ tstate_delete_common(PyThreadState *tstate) HEAD_UNLOCK(runtime); -#ifdef Py_GIL_DISABLED - _Py_qsbr_unregister((_PyThreadStateImpl *)tstate); -#endif - // XXX Unbind in PyThreadState_Clear(), or earlier // (and assert not-equal here)? if (tstate->_status.bound_gilstate) { @@ -1798,6 +1801,14 @@ tstate_delete_common(PyThreadState *tstate) // XXX Move to PyThreadState_Clear()? clear_datastack(tstate); + if (release_gil) { + _PyEval_ReleaseLock(tstate->interp, tstate, 1); + } + +#ifdef Py_GIL_DISABLED + _Py_qsbr_unregister(tstate); +#endif + tstate->_status.finalized = 1; } @@ -1809,7 +1820,7 @@ zapthreads(PyInterpreterState *interp) when the threads are all really dead (XXX famous last words). 
*/ while ((tstate = interp->threads.head) != NULL) { tstate_verify_not_active(tstate); - tstate_delete_common(tstate); + tstate_delete_common(tstate, 0); free_threadstate((_PyThreadStateImpl *)tstate); } } @@ -1820,7 +1831,7 @@ PyThreadState_Delete(PyThreadState *tstate) { _Py_EnsureTstateNotNULL(tstate); tstate_verify_not_active(tstate); - tstate_delete_common(tstate); + tstate_delete_common(tstate, 0); free_threadstate((_PyThreadStateImpl *)tstate); } @@ -1833,8 +1844,7 @@ _PyThreadState_DeleteCurrent(PyThreadState *tstate) _Py_qsbr_detach(((_PyThreadStateImpl *)tstate)->qsbr); #endif current_fast_clear(tstate->interp->runtime); - tstate_delete_common(tstate); - _PyEval_ReleaseLock(tstate->interp, NULL); + tstate_delete_common(tstate, 1); // release GIL as part of call free_threadstate((_PyThreadStateImpl *)tstate); } @@ -2059,7 +2069,7 @@ _PyThreadState_Attach(PyThreadState *tstate) while (1) { - int acquired_gil = _PyEval_AcquireLock(tstate); + _PyEval_AcquireLock(tstate); // XXX assert(tstate_is_alive(tstate)); current_fast_set(&_PyRuntime, tstate); @@ -2070,20 +2080,17 @@ _PyThreadState_Attach(PyThreadState *tstate) } #ifdef Py_GIL_DISABLED - if (_PyEval_IsGILEnabled(tstate) != acquired_gil) { + if (_PyEval_IsGILEnabled(tstate) && !tstate->_status.holds_gil) { // The GIL was enabled between our call to _PyEval_AcquireLock() // and when we attached (the GIL can't go from enabled to disabled // here because only a thread holding the GIL can disable // it). Detach and try again. - assert(!acquired_gil); tstate_set_detached(tstate, _Py_THREAD_DETACHED); tstate_deactivate(tstate); current_fast_clear(&_PyRuntime); continue; } _Py_qsbr_attach(((_PyThreadStateImpl *)tstate)->qsbr); -#else - (void)acquired_gil; #endif break; } @@ -2114,7 +2121,7 @@ detach_thread(PyThreadState *tstate, int detached_state) tstate_deactivate(tstate); tstate_set_detached(tstate, detached_state); current_fast_clear(&_PyRuntime); - _PyEval_ReleaseLock(tstate->interp, tstate); + _PyEval_ReleaseLock(tstate->interp, tstate, 0); } void @@ -2802,12 +2809,18 @@ PyGILState_Release(PyGILState_STATE oldstate) /* can't have been locked when we created it */ assert(oldstate == PyGILState_UNLOCKED); // XXX Unbind tstate here. + // gh-119585: `PyThreadState_Clear()` may call destructors that + // themselves use PyGILState_Ensure and PyGILState_Release, so make + // sure that gilstate_counter is not zero when calling it. + ++tstate->gilstate_counter; PyThreadState_Clear(tstate); + --tstate->gilstate_counter; /* Delete the thread-state. Note this releases the GIL too! * It's vital that the GIL be held here, to avoid shutdown * races; see bugs 225673 and 1061968 (that nasty bug has a * habit of coming back). */ + assert(tstate->gilstate_counter == 0); assert(current_fast_get() == tstate); _PyThreadState_DeleteCurrent(tstate); } @@ -3061,6 +3074,8 @@ tstate_mimalloc_bind(PyThreadState *tstate) // _PyObject_GC_New() and similar functions temporarily override this to // use one of the GC heaps. 
mts->current_object_heap = &mts->heaps[_Py_MIMALLOC_HEAP_OBJECT]; + + _Py_atomic_store_int(&mts->initialized, 1); #endif } diff --git a/Python/qsbr.c b/Python/qsbr.c index d7ac8f479cda1b..a7321154a62ffc 100644 --- a/Python/qsbr.c +++ b/Python/qsbr.c @@ -160,7 +160,8 @@ qsbr_poll_scan(struct _qsbr_shared *shared) bool _Py_qsbr_poll(struct _qsbr_thread_state *qsbr, uint64_t goal) { - assert(_PyThreadState_GET()->state == _Py_THREAD_ATTACHED); + assert(_Py_atomic_load_int_relaxed(&_PyThreadState_GET()->state) == _Py_THREAD_ATTACHED); + if (_Py_qbsr_goal_reached(qsbr, goal)) { return true; } @@ -231,20 +232,26 @@ _Py_qsbr_register(_PyThreadStateImpl *tstate, PyInterpreterState *interp, } void -_Py_qsbr_unregister(_PyThreadStateImpl *tstate) +_Py_qsbr_unregister(PyThreadState *tstate) { - struct _qsbr_shared *shared = tstate->qsbr->shared; + struct _qsbr_shared *shared = &tstate->interp->qsbr; + struct _PyThreadStateImpl *tstate_imp = (_PyThreadStateImpl*) tstate; + + // gh-119369: GIL must be released (if held) to prevent deadlocks, because + // we might not have an active tstate, which means that blocking on PyMutex + // locks will not implicitly release the GIL. + assert(!tstate->_status.holds_gil); PyMutex_Lock(&shared->mutex); // NOTE: we must load (or reload) the thread state's qbsr inside the mutex // because the array may have been resized (changing tstate->qsbr) while // we waited to acquire the mutex. - struct _qsbr_thread_state *qsbr = tstate->qsbr; + struct _qsbr_thread_state *qsbr = tstate_imp->qsbr; assert(qsbr->seq == 0 && "thread state must be detached"); - assert(qsbr->allocated && qsbr->tstate == (PyThreadState *)tstate); + assert(qsbr->allocated && qsbr->tstate == tstate); - tstate->qsbr = NULL; + tstate_imp->qsbr = NULL; qsbr->tstate = NULL; qsbr->allocated = false; qsbr->freelist_next = shared->freelist; diff --git a/Python/symtable.c b/Python/symtable.c index 2ec21a2d376da2..d8240cdd11f7ea 100644 --- a/Python/symtable.c +++ b/Python/symtable.c @@ -103,6 +103,7 @@ ste_new(struct symtable *st, identifier name, _Py_block_ty block, ste->ste_children = NULL; ste->ste_directives = NULL; + ste->ste_mangled_names = NULL; ste->ste_type = block; ste->ste_nested = 0; @@ -166,6 +167,7 @@ ste_dealloc(PySTEntryObject *ste) Py_XDECREF(ste->ste_varnames); Py_XDECREF(ste->ste_children); Py_XDECREF(ste->ste_directives); + Py_XDECREF(ste->ste_mangled_names); PyObject_Free(ste); } @@ -1338,6 +1340,11 @@ symtable_enter_block(struct symtable *st, identifier name, _Py_block_ty block, if (prev) { ste->ste_comp_iter_expr = prev->ste_comp_iter_expr; } + /* No need to inherit ste_mangled_names in classes, where all names + * are mangled. */ + if (prev && prev->ste_mangled_names != NULL && block != ClassBlock) { + ste->ste_mangled_names = Py_NewRef(prev->ste_mangled_names); + } /* The entry is owned by the stack. Borrow it for st_cur. 
*/ Py_DECREF(ste); st->st_cur = ste; @@ -1363,7 +1370,7 @@ symtable_enter_block(struct symtable *st, identifier name, _Py_block_ty block, static long symtable_lookup_entry(struct symtable *st, PySTEntryObject *ste, PyObject *name) { - PyObject *mangled = _Py_Mangle(st->st_private, name); + PyObject *mangled = _Py_MaybeMangle(st->st_private, ste, name); if (!mangled) return 0; long ret = _PyST_GetSymbol(ste, mangled); @@ -1384,8 +1391,7 @@ symtable_add_def_helper(struct symtable *st, PyObject *name, int flag, struct _s PyObject *o; PyObject *dict; long val; - PyObject *mangled = _Py_Mangle(st->st_private, name); - + PyObject *mangled = _Py_MaybeMangle(st->st_private, st->st_cur, name); if (!mangled) return 0; @@ -1474,6 +1480,11 @@ static int symtable_add_def(struct symtable *st, PyObject *name, int flag, int lineno, int col_offset, int end_lineno, int end_col_offset) { + if ((flag & DEF_TYPE_PARAM) && st->st_cur->ste_mangled_names != NULL) { + if(PySet_Add(st->st_cur->ste_mangled_names, name) < 0) { + return 0; + } + } return symtable_add_def_helper(st, name, flag, st->st_cur, lineno, col_offset, end_lineno, end_col_offset); } @@ -1508,7 +1519,6 @@ symtable_enter_type_param_block(struct symtable *st, identifier name, lineno, col_offset, end_lineno, end_col_offset)) { return 0; } - st->st_private = name; // This is used for setting the generic base _Py_DECLARE_STR(generic_base, ".generic_base"); if (!symtable_add_def(st, &_Py_STR(generic_base), DEF_LOCAL, @@ -1597,7 +1607,7 @@ symtable_record_directive(struct symtable *st, identifier name, int lineno, if (!st->st_cur->ste_directives) return 0; } - mangled = _Py_Mangle(st->st_private, name); + mangled = _Py_MaybeMangle(st->st_private, st->st_cur, name); if (!mangled) return 0; data = Py_BuildValue("(Niiii)", mangled, lineno, col_offset, end_lineno, end_col_offset); @@ -1673,6 +1683,7 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) VISIT_QUIT(st, 0); if (s->v.ClassDef.decorator_list) VISIT_SEQ(st, expr, s->v.ClassDef.decorator_list); + tmp = st->st_private; if (asdl_seq_LEN(s->v.ClassDef.type_params) > 0) { if (!symtable_enter_type_param_block(st, s->v.ClassDef.name, (void *)s->v.ClassDef.type_params, @@ -1680,6 +1691,11 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) LOCATION(s))) { VISIT_QUIT(st, 0); } + st->st_private = s->v.ClassDef.name; + st->st_cur->ste_mangled_names = PySet_New(NULL); + if (!st->st_cur->ste_mangled_names) { + VISIT_QUIT(st, 0); + } VISIT_SEQ(st, type_param, s->v.ClassDef.type_params); } VISIT_SEQ(st, expr, s->v.ClassDef.bases); @@ -1688,7 +1704,6 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) (void *)s, s->lineno, s->col_offset, s->end_lineno, s->end_col_offset)) VISIT_QUIT(st, 0); - tmp = st->st_private; st->st_private = s->v.ClassDef.name; if (asdl_seq_LEN(s->v.ClassDef.type_params) > 0) { if (!symtable_add_def(st, &_Py_ID(__type_params__), @@ -1702,13 +1717,13 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) } } VISIT_SEQ(st, stmt, s->v.ClassDef.body); - st->st_private = tmp; if (!symtable_exit_block(st)) VISIT_QUIT(st, 0); if (asdl_seq_LEN(s->v.ClassDef.type_params) > 0) { if (!symtable_exit_block(st)) VISIT_QUIT(st, 0); } + st->st_private = tmp; break; } case TypeAlias_kind: { @@ -2776,6 +2791,26 @@ _Py_SymtableStringObjectFlags(const char *str, PyObject *filename, return st; } +PyObject * +_Py_MaybeMangle(PyObject *privateobj, PySTEntryObject *ste, PyObject *name) +{ + /* Special case for type parameter blocks around generic classes: + * we want to mangle type parameter names (so a 
type param with a private + * name can be used inside the class body), but we don't want to mangle + * any other names that appear within the type parameter scope. + */ + if (ste->ste_mangled_names != NULL) { + int result = PySet_Contains(ste->ste_mangled_names, name); + if (result < 0) { + return NULL; + } + if (result == 0) { + return Py_NewRef(name); + } + } + return _Py_Mangle(privateobj, name); +} + PyObject * _Py_Mangle(PyObject *privateobj, PyObject *ident) { diff --git a/Python/sysmodule.c b/Python/sysmodule.c index 4da13e4552e786..00aa95531026b5 100644 --- a/Python/sysmodule.c +++ b/Python/sysmodule.c @@ -35,7 +35,6 @@ Data members: #include "pycore_sysmodule.h" // export _PySys_GetSizeOf() #include "pycore_tuple.h" // _PyTuple_FromArray() -#include "frameobject.h" // PyFrame_FastToLocalsWithError() #include "pydtrace.h" // PyDTrace_AUDIT() #include "osdefs.h" // DELIM #include "stdlib_module_names.h" // _Py_stdlib_module_names diff --git a/Python/tracemalloc.c b/Python/tracemalloc.c index e3ec72062f6931..fee7dd0e56d96d 100644 --- a/Python/tracemalloc.c +++ b/Python/tracemalloc.c @@ -312,7 +312,7 @@ traceback_hash(traceback_t *traceback) /* code based on tuplehash() of Objects/tupleobject.c */ Py_uhash_t x, y; /* Unsigned for defined overflow behavior. */ int len = traceback->nframe; - Py_uhash_t mult = _PyHASH_MULTIPLIER; + Py_uhash_t mult = PyHASH_MULTIPLIER; frame_t *frame; x = 0x345678UL; diff --git a/README.rst b/README.rst index ba8ba82ed87e3f..52b884c72ed088 100644 --- a/README.rst +++ b/README.rst @@ -1,4 +1,4 @@ -This is Python version 3.13.0 beta 1 +This is Python version 3.13.0 beta 2 ==================================== .. image:: https://github.com/python/cpython/workflows/Tests/badge.svg diff --git a/Tools/build/generate_sbom.py b/Tools/build/generate_sbom.py index 258b58c03c6800..c08568f2e00326 100644 --- a/Tools/build/generate_sbom.py +++ b/Tools/build/generate_sbom.py @@ -305,7 +305,21 @@ def create_externals_sbom() -> None: # Set the versionInfo and downloadLocation fields for all packages. for package in sbom_data["packages"]: - package["versionInfo"] = externals_name_to_version[package["name"]] + package_version = externals_name_to_version[package["name"]] + + # Update the version information in all the locations. + package["versionInfo"] = package_version + for external_ref in package["externalRefs"]: + if external_ref["referenceType"] != "cpe23Type": + continue + # Version is the fifth field of a CPE. 
+ cpe23ref = external_ref["referenceLocator"] + external_ref["referenceLocator"] = re.sub( + r"\A(cpe(?::[^:]+){4}):[^:]+:", + fr"\1:{package_version}:", + cpe23ref + ) + download_location = ( f"https://github.com/python/cpython-source-deps/archive/refs/tags/{externals_name_to_git_tag[package['name']]}.tar.gz" ) diff --git a/Tools/c-analyzer/cpython/globals-to-fix.tsv b/Tools/c-analyzer/cpython/globals-to-fix.tsv index 1b8cccf80872c8..aeb7cad9e027c0 100644 --- a/Tools/c-analyzer/cpython/globals-to-fix.tsv +++ b/Tools/c-analyzer/cpython/globals-to-fix.tsv @@ -304,6 +304,10 @@ Python/crossinterp_exceptions.h - PyExc_InterpreterNotFoundError - ##----------------------- ## singletons +Modules/_datetimemodule.c - zero_delta - +Modules/_datetimemodule.c - utc_timezone - +Modules/_datetimemodule.c - capi - +Modules/_datetimemodule.c - _globals - Objects/boolobject.c - _Py_FalseStruct - Objects/boolobject.c - _Py_TrueStruct - Objects/dictobject.c - empty_keys_struct - diff --git a/Tools/c-analyzer/cpython/ignored.tsv b/Tools/c-analyzer/cpython/ignored.tsv index a1c1553d8ddc10..adb7f8e5d64745 100644 --- a/Tools/c-analyzer/cpython/ignored.tsv +++ b/Tools/c-analyzer/cpython/ignored.tsv @@ -217,6 +217,7 @@ Modules/_datetimemodule.c - max_fold_seconds - Modules/_datetimemodule.c datetime_isoformat specs - Modules/_datetimemodule.c parse_hh_mm_ss_ff correction - Modules/_datetimemodule.c time_isoformat specs - +Modules/_datetimemodule.c - capi_types - Modules/_decimal/_decimal.c - cond_map_template - Modules/_decimal/_decimal.c - dec_signal_string - Modules/_decimal/_decimal.c - dflt_ctx - diff --git a/Tools/clinic/libclinic/parse_args.py b/Tools/clinic/libclinic/parse_args.py index 905f2a0ba94f4c..0f67901dd8609a 100644 --- a/Tools/clinic/libclinic/parse_args.py +++ b/Tools/clinic/libclinic/parse_args.py @@ -38,6 +38,8 @@ def declare_parser( p for p in f.parameters.values() if not p.is_positional_only() and not p.is_vararg() ]) + + condition = '#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)' if limited_capi: declarations = """ #define KWTUPLE NULL @@ -50,7 +52,12 @@ def declare_parser( # define KWTUPLE NULL #endif """ + + codegen.add_include('pycore_runtime.h', '_Py_SINGLETON()', + condition=condition) else: + # XXX Why do we not statically allocate the tuple + # for non-builtin modules? 
declarations = """ #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -71,9 +78,10 @@ def declare_parser( #endif // !Py_BUILD_CORE """ % num_keywords - condition = '#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)' - codegen.add_include('pycore_gc.h', 'PyGC_Head', condition=condition) - codegen.add_include('pycore_runtime.h', '_Py_ID()', condition=condition) + codegen.add_include('pycore_gc.h', 'PyGC_Head', + condition=condition) + codegen.add_include('pycore_runtime.h', '_Py_ID()', + condition=condition) declarations += """ static const char * const _keywords[] = {{{keywords_c} NULL}}; diff --git a/Tools/jit/README.md b/Tools/jit/README.md index 0f5aa9ce656bc9..ae126661c6ce25 100644 --- a/Tools/jit/README.md +++ b/Tools/jit/README.md @@ -21,6 +21,12 @@ chmod +x llvm.sh sudo ./llvm.sh 18 ``` +Install LLVM 18 on Fedora Linux 40 or newer: + +```sh +sudo dnf install 'clang(major) = 18' 'llvm(major) = 18' +``` + ### macOS Install LLVM 18 with [Homebrew](https://brew.sh): diff --git a/Tools/jit/_targets.py b/Tools/jit/_targets.py index 023ef498a21d7c..5604c429bcf8ad 100644 --- a/Tools/jit/_targets.py +++ b/Tools/jit/_targets.py @@ -212,13 +212,18 @@ def build( ): return stencil_groups = asyncio.run(self._build_stencils()) - with jit_stencils.open("w") as file: - file.write(digest) - if comment: - file.write(f"// {comment}\n\n") - file.write("") - for line in _writer.dump(stencil_groups): - file.write(f"{line}\n") + jit_stencils_new = out / "jit_stencils.h.new" + try: + with jit_stencils_new.open("w") as file: + file.write(digest) + if comment: + file.write(f"// {comment}\n") + file.write("\n") + for line in _writer.dump(stencil_groups): + file.write(f"{line}\n") + jit_stencils_new.replace(jit_stencils) + finally: + jit_stencils_new.unlink(missing_ok=True) class _COFF( @@ -349,6 +354,7 @@ def _handle_section( assert section_type in { "SHT_GROUP", "SHT_LLVM_ADDRSIG", + "SHT_NOTE", "SHT_NULL", "SHT_STRTAB", "SHT_SYMTAB", diff --git a/Tools/msi/freethreaded/freethreaded_files.wxs b/Tools/msi/freethreaded/freethreaded_files.wxs index adaf63c69d5ade..49ecb3429ad8f3 100644 --- a/Tools/msi/freethreaded/freethreaded_files.wxs +++ b/Tools/msi/freethreaded/freethreaded_files.wxs @@ -48,6 +48,12 @@ + + + + + + @@ -69,8 +75,14 @@ - - + + + + + + + + @@ -147,12 +159,6 @@ - - - - - - diff --git a/Tools/msi/pip/pip.wxs b/Tools/msi/pip/pip.wxs index 1d8083cad91a56..627c4710a9fdfa 100644 --- a/Tools/msi/pip/pip.wxs +++ b/Tools/msi/pip/pip.wxs @@ -25,8 +25,8 @@ - - + + (&DefaultFeature=3) AND NOT (!DefaultFeature=3) diff --git a/Tools/tsan/suppressions_free_threading.txt b/Tools/tsan/suppressions_free_threading.txt index 74dbf4bb1cb688..cb48a30751ac7b 100644 --- a/Tools/tsan/suppressions_free_threading.txt +++ b/Tools/tsan/suppressions_free_threading.txt @@ -11,34 +11,61 @@ race:set_allocator_unlocked ## Free-threaded suppressions -race:_add_to_weak_set -race:_in_weak_set -race:_mi_heap_delayed_free_partial -race:_PyEval_EvalFrameDefault -race:_PyImport_AcquireLock -race:_PyImport_ReleaseLock -race:_PyInterpreterState_SetNotRunningMain -race:_PyInterpreterState_IsRunningMain -race:_PyObject_GC_IS_SHARED -race:_PyObject_GC_SET_SHARED -race:_PyObject_GC_TRACK -# https://gist.github.com/mpage/0a24eb2dd458441ededb498e9b0e5de8 -race:_PyParkingLot_Park -race:_PyType_HasFeature -race:assign_version_tag -race:gc_restore_tid -race:initialize_new_array -race:insertdict -race:lookup_tp_dict -race:mi_heap_visit_pages -race:PyMember_GetOne -race:PyMember_SetOne -race:new_reference 
-race:set_contains_key -race:set_inheritable -race:start_the_world -race:tstate_set_detached -race:unicode_hash + +# These entries are for warnings that trigger in a library function, as called +# by a CPython function. + +# https://gist.github.com/swtaarrs/8e0e365e1d9cecece3269a2fb2f2b8b8 +race:sock_recv_impl +# https://gist.github.com/swtaarrs/08dfe7883b4c975c31ecb39388987a67 +race:free_threadstate + + +# These warnings trigger directly in a CPython function. + +race_top:_add_to_weak_set +race_top:_in_weak_set +race_top:_PyEval_EvalFrameDefault +race_top:_PyType_HasFeature +race_top:assign_version_tag +race_top:insertdict +race_top:lookup_tp_dict +race_top:PyMember_GetOne +race_top:PyMember_SetOne +race_top:new_reference +race_top:set_contains_key +# https://gist.github.com/colesbury/d13d033f413b4ad07929d044bed86c35 +race_top:set_discard_entry +race_top:set_inheritable +race_top:Py_SET_TYPE +race_top:_PyDict_CheckConsistency +race_top:_Py_dict_lookup_threadsafe +race_top:_multiprocessing_SemLock_acquire_impl +race_top:dictiter_new +race_top:dictresize +race_top:insert_to_emptydict +race_top:insertdict +race_top:list_get_item_ref +race_top:make_pending_calls +race_top:set_add_entry +race_top:_Py_slot_tp_getattr_hook +race_top:add_threadstate +race_top:dump_traceback +race_top:fatal_error +race_top:_multiprocessing_SemLock_release_impl +race_top:_PyFrame_GetCode +race_top:_PyFrame_Initialize +race_top:PyInterpreterState_ThreadHead +race_top:_PyObject_TryGetInstanceAttribute +race_top:PyThreadState_Next +race_top:Py_TYPE +race_top:PyUnstable_InterpreterFrame_GetLine +race_top:sock_close +race_top:tstate_delete_common +race_top:tstate_is_freed +race_top:type_modified_unlocked +race_top:write_thread_id +race_top:PyThreadState_Clear # https://gist.github.com/mpage/6962e8870606cfc960e159b407a0cb40 thread:pthread_create diff --git a/Tools/tsan/supressions.txt b/Tools/tsan/supressions.txt index c778c791eacce8..22ba9d6ba2ab4d 100644 --- a/Tools/tsan/supressions.txt +++ b/Tools/tsan/supressions.txt @@ -2,3 +2,6 @@ # reference: https://github.com/google/sanitizers/wiki/ThreadSanitizerSuppressions race:get_allocator_unlocked race:set_allocator_unlocked + +# https://gist.github.com/mpage/daaf32b39180c1989572957b943eb665 +thread:pthread_create diff --git a/Tools/wasm/python.html b/Tools/wasm/python.html index 17ffa0ea8bfeff..81a035a5c4cd93 100644 --- a/Tools/wasm/python.html +++ b/Tools/wasm/python.html @@ -35,11 +35,12 @@

Simple REPL for Python WASM

-
+
+
+
+
The simple REPL provides a limited Python experience in the browser. diff --git a/Tools/wasm/python.worker.js b/Tools/wasm/python.worker.js index 1b794608fffe7b..4ce4e16fc0fa19 100644 --- a/Tools/wasm/python.worker.js +++ b/Tools/wasm/python.worker.js @@ -19,18 +19,18 @@ class StdinBuffer { } stdin = () => { - if (this.numberOfCharacters + 1 === this.readIndex) { + while (this.numberOfCharacters + 1 === this.readIndex) { if (!this.sentNull) { // Must return null once to indicate we're done for now. this.sentNull = true return null } this.sentNull = false + // Prompt will reset this.readIndex to 1 this.prompt() } const char = this.buffer[this.readIndex] this.readIndex += 1 - // How do I send an EOF?? return char } } @@ -71,7 +71,11 @@ var Module = { onmessage = (event) => { if (event.data.type === 'run') { - // TODO: Set up files from event.data.files + if (event.data.files) { + for (const [filename, contents] of Object.entries(event.data.files)) { + Module.FS.writeFile(filename, contents) + } + } const ret = callMain(event.data.args) postMessage({ type: 'finished', diff --git a/configure b/configure index de426e6b686e68..4626675f8c30a1 100755 --- a/configure +++ b/configure @@ -14618,6 +14618,8 @@ fi { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $with_system_libmpdec" >&5 printf "%s\n" "$with_system_libmpdec" >&6; } + + if test "x$with_system_libmpdec" = xyes then : @@ -14697,8 +14699,10 @@ printf "%s\n" "yes" >&6; } fi else $as_nop LIBMPDEC_CFLAGS="-I\$(srcdir)/Modules/_decimal/libmpdec" - LIBMPDEC_LIBS="-lm \$(LIBMPDEC_A)" - LIBMPDEC_INTERNAL="\$(LIBMPDEC_HEADERS) \$(LIBMPDEC_A)" + LIBMPDEC_LIBS="-lm \$(LIBMPDEC_A)" + LIBMPDEC_INTERNAL="\$(LIBMPDEC_HEADERS) \$(LIBMPDEC_A)" + have_mpdec=yes + with_system_libmpdec=no fi if test "x$with_system_libmpdec" = xyes @@ -14745,15 +14749,19 @@ LIBS=$save_LIBS else $as_nop - have_mpdec=yes - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: the bundled copy of libmpdecimal is scheduled for removal in Python 3.15; consider using a system installed mpdecimal library." >&5 + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: the bundled copy of libmpdecimal is scheduled for removal in Python 3.15; consider using a system installed mpdecimal library." >&5 printf "%s\n" "$as_me: WARNING: the bundled copy of libmpdecimal is scheduled for removal in Python 3.15; consider using a system installed mpdecimal library." 
>&2;} fi if test "$with_system_libmpdec" = "yes" && test "$have_mpdec" = "no" then : - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: no system libmpdecimal found; unable to build _decimal" >&5 -printf "%s\n" "$as_me: WARNING: no system libmpdecimal found; unable to build _decimal" >&2;} + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: no system libmpdecimal found; falling back to bundled libmpdecimal (deprecated and scheduled for removal in Python 3.15)" >&5 +printf "%s\n" "$as_me: WARNING: no system libmpdecimal found; falling back to bundled libmpdecimal (deprecated and scheduled for removal in Python 3.15)" >&2;} + LIBMPDEC_CFLAGS="-I\$(srcdir)/Modules/_decimal/libmpdec" + LIBMPDEC_LIBS="-lm \$(LIBMPDEC_A)" + LIBMPDEC_INTERNAL="\$(LIBMPDEC_HEADERS) \$(LIBMPDEC_A)" + have_mpdec=yes + with_system_libmpdec=no fi # Disable forced inlining in debug builds, see GH-94847 diff --git a/configure.ac b/configure.ac index 8eb96767487592..c5631a2380f0f1 100644 --- a/configure.ac +++ b/configure.ac @@ -3980,6 +3980,13 @@ AC_ARG_WITH( [with_system_libmpdec="yes"]) AC_MSG_RESULT([$with_system_libmpdec]) +AC_DEFUN([USE_BUNDLED_LIBMPDEC], + [LIBMPDEC_CFLAGS="-I\$(srcdir)/Modules/_decimal/libmpdec" + LIBMPDEC_LIBS="-lm \$(LIBMPDEC_A)" + LIBMPDEC_INTERNAL="\$(LIBMPDEC_HEADERS) \$(LIBMPDEC_A)" + have_mpdec=yes + with_system_libmpdec=no]) + AS_VAR_IF( [with_system_libmpdec], [yes], [PKG_CHECK_MODULES( @@ -3987,9 +3994,7 @@ AS_VAR_IF( [LIBMPDEC_CFLAGS=${LIBMPDEC_CFLAGS-""} LIBMPDEC_LIBS=${LIBMPDEC_LIBS-"-lmpdec -lm"} LIBMPDEC_INTERNAL=])], - [LIBMPDEC_CFLAGS="-I\$(srcdir)/Modules/_decimal/libmpdec" - LIBMPDEC_LIBS="-lm \$(LIBMPDEC_A)" - LIBMPDEC_INTERNAL="\$(LIBMPDEC_HEADERS) \$(LIBMPDEC_A)"]) + [USE_BUNDLED_LIBMPDEC()]) AS_VAR_IF([with_system_libmpdec], [yes], [WITH_SAVE_ENV([ @@ -4006,13 +4011,15 @@ AS_VAR_IF([with_system_libmpdec], [yes], [have_mpdec=yes], [have_mpdec=no]) ])], - [AS_VAR_SET([have_mpdec], [yes]) - AC_MSG_WARN([m4_normalize([ + [AC_MSG_WARN([m4_normalize([ the bundled copy of libmpdecimal is scheduled for removal in Python 3.15; consider using a system installed mpdecimal library.])])]) AS_IF([test "$with_system_libmpdec" = "yes" && test "$have_mpdec" = "no"], - [AC_MSG_WARN([no system libmpdecimal found; unable to build _decimal])]) + [AC_MSG_WARN([m4_normalize([ + no system libmpdecimal found; falling back to bundled libmpdecimal + (deprecated and scheduled for removal in Python 3.15)])]) + USE_BUNDLED_LIBMPDEC()]) # Disable forced inlining in debug builds, see GH-94847 AS_VAR_IF(