diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile deleted file mode 100644 index ada5fb0fe64dc2..00000000000000 --- a/.devcontainer/Dockerfile +++ /dev/null @@ -1,24 +0,0 @@ -FROM docker.io/library/fedora:40 - -ENV CC=clang - -ENV WASI_SDK_VERSION=24 -ENV WASI_SDK_PATH=/opt/wasi-sdk - -ENV WASMTIME_HOME=/opt/wasmtime -ENV WASMTIME_VERSION=22.0.0 -ENV WASMTIME_CPU_ARCH=x86_64 - -RUN dnf -y --nodocs --setopt=install_weak_deps=False install /usr/bin/{blurb,clang,curl,git,ln,tar,xz} 'dnf-command(builddep)' && \ - dnf -y --nodocs --setopt=install_weak_deps=False builddep python3 && \ - dnf -y clean all - -RUN mkdir ${WASI_SDK_PATH} && \ - curl --location https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-${WASI_SDK_VERSION}/wasi-sdk-${WASI_SDK_VERSION}.0-x86_64-linux.tar.gz | \ - tar --strip-components 1 --directory ${WASI_SDK_PATH} --extract --gunzip - -RUN mkdir --parents ${WASMTIME_HOME} && \ - curl --location "https://github.com/bytecodealliance/wasmtime/releases/download/v${WASMTIME_VERSION}/wasmtime-v${WASMTIME_VERSION}-${WASMTIME_CPU_ARCH}-linux.tar.xz" | \ - xz --decompress | \ - tar --strip-components 1 --directory ${WASMTIME_HOME} -x && \ - ln -s ${WASMTIME_HOME}/wasmtime /usr/local/bin diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 0dc303015df5c7..64c85c1101e6e6 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,7 +1,5 @@ { - "build": { - "dockerfile": "Dockerfile" - }, + "image": "ghcr.io/python/devcontainer:2024.09.25.11038928730", "onCreateCommand": [ // Install common tooling. "dnf", diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 680f2ed5be031a..7e9c3caf23f079 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -207,7 +207,6 @@ Doc/c-api/stable.rst @encukou **/*bisect* @rhettinger **/*heapq* @rhettinger **/*functools* @rhettinger -**/*decimal* @rhettinger **/*dataclasses* @ericvsmith diff --git a/.github/ISSUE_TEMPLATE/crash.yml b/.github/ISSUE_TEMPLATE/crash.yml index c14d7cf2599d4c..6d73f7cae5c0ae 100644 --- a/.github/ISSUE_TEMPLATE/crash.yml +++ b/.github/ISSUE_TEMPLATE/crash.yml @@ -32,6 +32,7 @@ body: - "3.10" - "3.11" - "3.12" + - "3.13" - "CPython main branch" validations: required: true diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index e5f6fd47e7367b..ec7904c2e2cc73 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -195,13 +195,14 @@ jobs: build_ubuntu_ssltests: name: 'Ubuntu SSL tests with OpenSSL' - runs-on: ubuntu-22.04 + runs-on: ${{ matrix.os }} timeout-minutes: 60 needs: check_source if: needs.check_source.outputs.run_tests == 'true' strategy: fail-fast: false matrix: + os: [ubuntu-22.04] openssl_ver: [3.0.15, 3.1.7, 3.2.3, 3.3.2] env: OPENSSL_VER: ${{ matrix.openssl_ver }} @@ -231,7 +232,7 @@ jobs: uses: actions/cache@v4 with: path: ./multissl/openssl/${{ env.OPENSSL_VER }} - key: ${{ runner.os }}-multissl-openssl-${{ env.OPENSSL_VER }} + key: ${{ matrix.os }}-multissl-openssl-${{ env.OPENSSL_VER }} - name: Install OpenSSL if: steps.cache-openssl.outputs.cache-hit != 'true' run: python3 Tools/ssl/multissltests.py --steps=library --base-directory $MULTISSL_DIR --openssl $OPENSSL_VER --system Linux @@ -410,7 +411,7 @@ jobs: uses: actions/cache@v4 with: path: ./multissl/openssl/${{ env.OPENSSL_VER }} - key: ${{ runner.os }}-multissl-openssl-${{ env.OPENSSL_VER }} + key: ${{ matrix.os }}-multissl-openssl-${{ env.OPENSSL_VER }} - name: Install OpenSSL if: 
steps.cache-openssl.outputs.cache-hit != 'true' run: python3 Tools/ssl/multissltests.py --steps=library --base-directory $MULTISSL_DIR --openssl $OPENSSL_VER --system Linux diff --git a/.github/workflows/jit.yml b/.github/workflows/jit.yml index 5e3ac9e9e0fada..754f179f105591 100644 --- a/.github/workflows/jit.yml +++ b/.github/workflows/jit.yml @@ -110,8 +110,7 @@ jobs: - name: Native Windows if: runner.os == 'Windows' && matrix.architecture != 'ARM64' run: | - choco upgrade llvm -y - choco install llvm --allow-downgrade --no-progress --version ${{ matrix.llvm }} + choco install llvm --allow-downgrade --no-progress --version ${{ matrix.llvm }}.1.0 ./PCbuild/build.bat --experimental-jit ${{ matrix.debug && '-d' || '--pgo' }} -p ${{ matrix.architecture }} ./PCbuild/rt.bat ${{ matrix.debug && '-d' || '' }} -p ${{ matrix.architecture }} -q --multiprocess 0 --timeout 4500 --verbose2 --verbose3 @@ -119,8 +118,7 @@ jobs: - name: Emulated Windows if: runner.os == 'Windows' && matrix.architecture == 'ARM64' run: | - choco upgrade llvm -y - choco install llvm --allow-downgrade --no-progress --version ${{ matrix.llvm }} + choco install llvm --allow-downgrade --no-progress --version ${{ matrix.llvm }}.1.0 ./PCbuild/build.bat --experimental-jit ${{ matrix.debug && '-d' || '' }} -p ${{ matrix.architecture }} - name: Native macOS @@ -159,7 +157,7 @@ jobs: CC="${{ matrix.compiler == 'clang' && 'clang --target=$HOST' || '$HOST-gcc' }}" \ CPP="$CC --preprocess" \ HOSTRUNNER=qemu-${{ matrix.architecture }} \ - ./configure --enable-experimental-jit ${{ matrix.debug && '--with-pydebug' || '--enable-optimizations --with-lto' }} --build=x86_64-linux-gnu --host="$HOST" --with-build-python=../build/bin/python3 --with-pkg-config=no ac_cv_buggy_getaddrinfo=no ac_cv_file__dev_ptc=no ac_cv_file__dev_ptmx=yes + ./configure --enable-experimental-jit ${{ matrix.debug && '--with-pydebug' || '--with-lto' }} --build=x86_64-linux-gnu --host="$HOST" --with-build-python=../build/bin/python3 --with-pkg-config=no ac_cv_buggy_getaddrinfo=no ac_cv_file__dev_ptc=no ac_cv_file__dev_ptmx=yes make all --jobs 4 ./python -m test --ignorefile=Tools/jit/ignore-tests-emulated-linux.txt --multiprocess 0 --timeout 4500 --verbose2 --verbose3 diff --git a/.github/workflows/reusable-ubuntu.yml b/.github/workflows/reusable-ubuntu.yml index 01bd914af79fa0..769f1210de4d3c 100644 --- a/.github/workflows/reusable-ubuntu.yml +++ b/.github/workflows/reusable-ubuntu.yml @@ -14,7 +14,11 @@ jobs: build_ubuntu_reusable: name: 'build and test' timeout-minutes: 60 - runs-on: ubuntu-22.04 + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-22.04] env: FORCE_COLOR: 1 OPENSSL_VER: 3.0.15 @@ -36,7 +40,7 @@ jobs: uses: actions/cache@v4 with: path: ./multissl/openssl/${{ env.OPENSSL_VER }} - key: ${{ runner.os }}-multissl-openssl-${{ env.OPENSSL_VER }} + key: ${{ matrix.os }}-multissl-openssl-${{ env.OPENSSL_VER }} - name: Install OpenSSL if: steps.cache-openssl.outputs.cache-hit != 'true' run: python3 Tools/ssl/multissltests.py --steps=library --base-directory $MULTISSL_DIR --openssl $OPENSSL_VER --system Linux diff --git a/Doc/Makefile b/Doc/Makefile index a2d89343648dc1..70ad703ac77e82 100644 --- a/Doc/Makefile +++ b/Doc/Makefile @@ -306,12 +306,12 @@ serve: # for development releases: always build .PHONY: autobuild-dev autobuild-dev: - $(MAKE) dist SPHINXOPTS='$(SPHINXOPTS) -Ea -A daily=1' + $(MAKE) dist-no-html SPHINXOPTS='$(SPHINXOPTS) -Ea -A daily=1' -# for quick rebuilds (HTML only) +# for HTML-only rebuilds .PHONY: 
autobuild-dev-html autobuild-dev-html: - $(MAKE) html SPHINXOPTS='$(SPHINXOPTS) -Ea -A daily=1' + $(MAKE) dist-html SPHINXOPTS='$(SPHINXOPTS) -Ea -A daily=1' # for stable releases: only build if not in pre-release stage (alpha, beta) # release candidate downloads are okay, since the stable tree can be in that stage diff --git a/Doc/c-api/exceptions.rst b/Doc/c-api/exceptions.rst index 05349590975160..fc2336d120c259 100644 --- a/Doc/c-api/exceptions.rst +++ b/Doc/c-api/exceptions.rst @@ -733,7 +733,7 @@ Exception Classes This creates a class object derived from :exc:`Exception` (accessible in C as :c:data:`PyExc_Exception`). - The :attr:`!__module__` attribute of the new class is set to the first part (up + The :attr:`~type.__module__` attribute of the new class is set to the first part (up to the last dot) of the *name* argument, and the class name is set to the last part (after the last dot). The *base* argument can be used to specify alternate base classes; it can either be only one class or a tuple of classes. The *dict* diff --git a/Doc/c-api/init_config.rst b/Doc/c-api/init_config.rst index 0ef7d015be9b93..9dc9ba61e7a60f 100644 --- a/Doc/c-api/init_config.rst +++ b/Doc/c-api/init_config.rst @@ -1248,19 +1248,24 @@ PyConfig .. c:member:: int perf_profiling - Enable compatibility mode with the perf profiler? + Enable the Linux ``perf`` profiler support? - If non-zero, initialize the perf trampoline. See :ref:`perf_profiling` - for more information. + If equals to ``1``, enable support for the Linux ``perf`` profiler. - Set by :option:`-X perf <-X>` command-line option and by the - :envvar:`PYTHON_PERF_JIT_SUPPORT` environment variable for perf support - with stack pointers and :option:`-X perf_jit <-X>` command-line option - and by the :envvar:`PYTHON_PERF_JIT_SUPPORT` environment variable for perf - support with DWARF JIT information. + If equals to ``2``, enable support for the Linux ``perf`` profiler with + DWARF JIT support. + + Set to ``1`` by :option:`-X perf <-X>` command-line option and the + :envvar:`PYTHONPERFSUPPORT` environment variable. + + Set to ``2`` by the :option:`-X perf_jit <-X>` command-line option and + the :envvar:`PYTHON_PERF_JIT_SUPPORT` environment variable. Default: ``-1``. + .. seealso:: + See :ref:`perf_profiling` for more information. + .. versionadded:: 3.12 .. c:member:: int use_environment diff --git a/Doc/c-api/long.rst b/Doc/c-api/long.rst index 098a55c50e219a..e0ae0f77a01db9 100644 --- a/Doc/c-api/long.rst +++ b/Doc/c-api/long.rst @@ -159,7 +159,6 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate. .. versionadded:: 3.13 -.. XXX alias PyLong_AS_LONG (for now) .. c:function:: long PyLong_AsLong(PyObject *obj) .. index:: @@ -181,6 +180,16 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate. .. versionchanged:: 3.10 This function will no longer use :meth:`~object.__int__`. + .. c:namespace:: NULL + + .. c:function:: long PyLong_AS_LONG(PyObject *obj) + + A :term:`soft deprecated` alias. + Exactly equivalent to the preferred ``PyLong_AsLong``. In particular, + it can fail with :exc:`OverflowError` or another exception. + + .. deprecated:: 3.14 + The function is soft deprecated. .. c:function:: int PyLong_AsInt(PyObject *obj) @@ -570,7 +579,7 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate. On failure, return -1 with an exception set. This function always succeeds if *obj* is a :c:type:`PyLongObject` or its subtype. - .. versionadded:: 3.14 + .. versionadded:: next .. 
c:function:: PyObject* PyLong_GetInfo(void) diff --git a/Doc/c-api/object.rst b/Doc/c-api/object.rst index 1c28f30321bd7a..630114a4339110 100644 --- a/Doc/c-api/object.rst +++ b/Doc/c-api/object.rst @@ -367,14 +367,14 @@ Object Protocol The result will be ``1`` when at least one of the checks returns ``1``, otherwise it will be ``0``. - If *cls* has a :meth:`~class.__subclasscheck__` method, it will be called to + If *cls* has a :meth:`~type.__subclasscheck__` method, it will be called to determine the subclass status as described in :pep:`3119`. Otherwise, *derived* is a subclass of *cls* if it is a direct or indirect subclass, - i.e. contained in ``cls.__mro__``. + i.e. contained in :attr:`cls.__mro__ `. Normally only class objects, i.e. instances of :class:`type` or a derived class, are considered classes. However, objects can override this by having - a :attr:`~class.__bases__` attribute (which must be a tuple of base classes). + a :attr:`~type.__bases__` attribute (which must be a tuple of base classes). .. c:function:: int PyObject_IsInstance(PyObject *inst, PyObject *cls) @@ -386,15 +386,15 @@ Object Protocol The result will be ``1`` when at least one of the checks returns ``1``, otherwise it will be ``0``. - If *cls* has a :meth:`~class.__instancecheck__` method, it will be called to + If *cls* has a :meth:`~type.__instancecheck__` method, it will be called to determine the subclass status as described in :pep:`3119`. Otherwise, *inst* is an instance of *cls* if its class is a subclass of *cls*. An instance *inst* can override what is considered its class by having a - :attr:`~instance.__class__` attribute. + :attr:`~object.__class__` attribute. An object *cls* can override if it is considered a class, and what its base - classes are, by having a :attr:`~class.__bases__` attribute (which must be a tuple + classes are, by having a :attr:`~type.__bases__` attribute (which must be a tuple of base classes). diff --git a/Doc/c-api/type.rst b/Doc/c-api/type.rst index b56da6954f41d4..0031708c4680cc 100644 --- a/Doc/c-api/type.rst +++ b/Doc/c-api/type.rst @@ -53,7 +53,8 @@ Type Objects .. c:function:: PyObject* PyType_GetDict(PyTypeObject* type) Return the type object's internal namespace, which is otherwise only - exposed via a read-only proxy (``cls.__dict__``). This is a + exposed via a read-only proxy (:attr:`cls.__dict__ `). + This is a replacement for accessing :c:member:`~PyTypeObject.tp_dict` directly. The returned dictionary must be treated as read-only. @@ -140,7 +141,7 @@ Type Objects Return true if *a* is a subtype of *b*. This function only checks for actual subtypes, which means that - :meth:`~class.__subclasscheck__` is not called on *b*. Call + :meth:`~type.__subclasscheck__` is not called on *b*. Call :c:func:`PyObject_IsSubclass` to do the same check that :func:`issubclass` would do. @@ -174,29 +175,30 @@ Type Objects .. c:function:: PyObject* PyType_GetName(PyTypeObject *type) - Return the type's name. Equivalent to getting the type's ``__name__`` attribute. + Return the type's name. Equivalent to getting the type's + :attr:`~type.__name__` attribute. .. versionadded:: 3.11 .. c:function:: PyObject* PyType_GetQualName(PyTypeObject *type) Return the type's qualified name. Equivalent to getting the - type's ``__qualname__`` attribute. + type's :attr:`~type.__qualname__` attribute. .. versionadded:: 3.11 .. c:function:: PyObject* PyType_GetFullyQualifiedName(PyTypeObject *type) Return the type's fully qualified name. 
Equivalent to - ``f"{type.__module__}.{type.__qualname__}"``, or ``type.__qualname__`` if - ``type.__module__`` is not a string or is equal to ``"builtins"``. + ``f"{type.__module__}.{type.__qualname__}"``, or :attr:`type.__qualname__` + if :attr:`type.__module__` is not a string or is equal to ``"builtins"``. .. versionadded:: 3.13 .. c:function:: PyObject* PyType_GetModuleName(PyTypeObject *type) - Return the type's module name. Equivalent to getting the ``type.__module__`` - attribute. + Return the type's module name. Equivalent to getting the + :attr:`type.__module__` attribute. .. versionadded:: 3.13 diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst index cfe4563d744b8a..da1b5092fbf787 100644 --- a/Doc/c-api/typeobj.rst +++ b/Doc/c-api/typeobj.rst @@ -567,12 +567,12 @@ and :c:data:`PyType_Type` effectively act as defaults.) For :ref:`statically allocated type objects `, the *tp_name* field should contain a dot. - Everything before the last dot is made accessible as the :attr:`__module__` + Everything before the last dot is made accessible as the :attr:`~type.__module__` attribute, and everything after the last dot is made accessible as the - :attr:`~definition.__name__` attribute. + :attr:`~type.__name__` attribute. If no dot is present, the entire :c:member:`~PyTypeObject.tp_name` field is made accessible as the - :attr:`~definition.__name__` attribute, and the :attr:`__module__` attribute is undefined + :attr:`~type.__name__` attribute, and the :attr:`~type.__module__` attribute is undefined (unless explicitly set in the dictionary, as explained above). This means your type will be impossible to pickle. Additionally, it will not be listed in module documentations created with pydoc. @@ -1131,7 +1131,7 @@ and :c:data:`PyType_Type` effectively act as defaults.) .. c:macro:: Py_TPFLAGS_MANAGED_DICT - This bit indicates that instances of the class have a ``__dict__`` + This bit indicates that instances of the class have a `~object.__dict__` attribute, and that the space for the dictionary is managed by the VM. If this flag is set, :c:macro:`Py_TPFLAGS_HAVE_GC` should also be set. @@ -1335,8 +1335,8 @@ and :c:data:`PyType_Type` effectively act as defaults.) .. c:member:: const char* PyTypeObject.tp_doc An optional pointer to a NUL-terminated C string giving the docstring for this - type object. This is exposed as the :attr:`__doc__` attribute on the type and - instances of the type. + type object. This is exposed as the :attr:`~type.__doc__` attribute on the + type and instances of the type. **Inheritance:** @@ -2036,7 +2036,7 @@ and :c:data:`PyType_Type` effectively act as defaults.) A collection of subclasses. Internal use only. May be an invalid pointer. To get a list of subclasses, call the Python method - :py:meth:`~class.__subclasses__`. + :py:meth:`~type.__subclasses__`. .. versionchanged:: 3.12 diff --git a/Doc/c-api/unicode.rst b/Doc/c-api/unicode.rst index 958fafd47ac81b..b2ac0c903c2bd7 100644 --- a/Doc/c-api/unicode.rst +++ b/Doc/c-api/unicode.rst @@ -317,7 +317,7 @@ These APIs can be used to work with surrogates: .. c:function:: Py_UCS4 Py_UNICODE_JOIN_SURROGATES(Py_UCS4 high, Py_UCS4 low) - Join two surrogate characters and return a single :c:type:`Py_UCS4` value. + Join two surrogate code points and return a single :c:type:`Py_UCS4` value. *high* and *low* are respectively the leading and trailing surrogates in a surrogate pair. *high* must be in the range [0xD800; 0xDBFF] and *low* must be in the range [0xDC00; 0xDFFF]. 
@@ -338,6 +338,8 @@ APIs: This is the recommended way to allocate a new Unicode object. Objects created using this function are not resizable. + On error, set an exception and return ``NULL``. + .. versionadded:: 3.3 @@ -614,6 +616,8 @@ APIs: Return the length of the Unicode object, in code points. + On error, set an exception and return ``-1``. + .. versionadded:: 3.3 @@ -657,6 +661,8 @@ APIs: not out of bounds, and that the object can be modified safely (i.e. that it its reference count is one). + Return ``0`` on success, ``-1`` on error with an exception set. + .. versionadded:: 3.3 @@ -666,6 +672,8 @@ APIs: Unicode object and the index is not out of bounds, in contrast to :c:func:`PyUnicode_READ_CHAR`, which performs no error checking. + Return character on success, ``-1`` on error with an exception set. + .. versionadded:: 3.3 @@ -674,6 +682,7 @@ APIs: Return a substring of *unicode*, from character index *start* (included) to character index *end* (excluded). Negative indices are not supported. + On error, set an exception and return ``NULL``. .. versionadded:: 3.3 @@ -990,6 +999,9 @@ These are the UTF-8 codec APIs: object. Error handling is "strict". Return ``NULL`` if an exception was raised by the codec. + The function fails if the string contains surrogate code points + (``U+D800`` - ``U+DFFF``). + .. c:function:: const char* PyUnicode_AsUTF8AndSize(PyObject *unicode, Py_ssize_t *size) @@ -1002,6 +1014,9 @@ These are the UTF-8 codec APIs: On error, set an exception, set *size* to ``-1`` (if it's not NULL) and return ``NULL``. + The function fails if the string contains surrogate code points + (``U+D800`` - ``U+DFFF``). + This caches the UTF-8 representation of the string in the Unicode object, and subsequent calls will return a pointer to the same buffer. The caller is not responsible for deallocating the buffer. The buffer is deallocated and @@ -1429,8 +1444,9 @@ They all return ``NULL`` or ``-1`` if an exception occurs. Compare a Unicode object with a char buffer which is interpreted as being UTF-8 or ASCII encoded and return true (``1``) if they are equal, or false (``0``) otherwise. - If the Unicode object contains surrogate characters or - the C string is not valid UTF-8, false (``0``) is returned. + If the Unicode object contains surrogate code points + (``U+D800`` - ``U+DFFF``) or the C string is not valid UTF-8, + false (``0``) is returned. This function does not raise exceptions. @@ -1534,7 +1550,7 @@ PyUnicodeWriter The :c:type:`PyUnicodeWriter` API can be used to create a Python :class:`str` object. -.. versionadded:: 3.14 +.. versionadded:: next .. c:type:: PyUnicodeWriter diff --git a/Doc/conf.py b/Doc/conf.py index 27cf03d6bea05a..5f22340ac434c9 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -413,8 +413,8 @@ \let\endVerbatim=\endOriginalVerbatim \setcounter{tocdepth}{2} ''', - # The paper size ('letter' or 'a4'). - 'papersize': 'a4', + # The paper size ('letterpaper' or 'a4paper'). + 'papersize': 'a4paper', # The font size ('10pt', '11pt' or '12pt'). 'pointsize': '10pt', } diff --git a/Doc/deprecations/pending-removal-in-3.16.rst b/Doc/deprecations/pending-removal-in-3.16.rst index 446cc63cb34ff9..fc2ef33de5e5cc 100644 --- a/Doc/deprecations/pending-removal-in-3.16.rst +++ b/Doc/deprecations/pending-removal-in-3.16.rst @@ -18,6 +18,14 @@ Pending Removal in Python 3.16 Use the ``'w'`` format code (:c:type:`Py_UCS4`) for Unicode characters instead. 
+* :mod:`asyncio`: + + * :mod:`asyncio`: + :func:`!asyncio.iscoroutinefunction` is deprecated + and will be removed in Python 3.16, + use :func:`inspect.iscoroutinefunction` instead. + (Contributed by Jiahao Li and Kumar Aditya in :gh:`122875`.) + * :mod:`shutil`: * The :class:`!ExecError` exception diff --git a/Doc/extending/newtypes.rst b/Doc/extending/newtypes.rst index fd05c82b41629a..7f57a3a6aac0ed 100644 --- a/Doc/extending/newtypes.rst +++ b/Doc/extending/newtypes.rst @@ -296,7 +296,7 @@ An interesting advantage of using the :c:member:`~PyTypeObject.tp_members` table descriptors that are used at runtime is that any attribute defined this way can have an associated doc string simply by providing the text in the table. An application can use the introspection API to retrieve the descriptor from the -class object, and get the doc string using its :attr:`!__doc__` attribute. +class object, and get the doc string using its :attr:`~type.__doc__` attribute. As with the :c:member:`~PyTypeObject.tp_methods` table, a sentinel entry with a :c:member:`~PyMethodDef.ml_name` value of ``NULL`` is required. diff --git a/Doc/extending/newtypes_tutorial.rst b/Doc/extending/newtypes_tutorial.rst index b8f437f8d2646e..bcf938f117d148 100644 --- a/Doc/extending/newtypes_tutorial.rst +++ b/Doc/extending/newtypes_tutorial.rst @@ -144,7 +144,7 @@ only used for variable-sized objects and should otherwise be zero. If you want your type to be subclassable from Python, and your type has the same :c:member:`~PyTypeObject.tp_basicsize` as its base type, you may have problems with multiple inheritance. A Python subclass of your type will have to list your type first - in its :attr:`~class.__bases__`, or else it will not be able to call your type's + in its :attr:`~type.__bases__`, or else it will not be able to call your type's :meth:`~object.__new__` method without getting an error. You can avoid this problem by ensuring that your type has a larger value for :c:member:`~PyTypeObject.tp_basicsize` than its base type does. Most of the time, this will be true anyway, because either your diff --git a/Doc/faq/programming.rst b/Doc/faq/programming.rst index 8f9b464ccbfcb7..fa7b22bde1dc6f 100644 --- a/Doc/faq/programming.rst +++ b/Doc/faq/programming.rst @@ -1613,9 +1613,16 @@ method too, and it must do so carefully. The basic implementation of self.__dict__[name] = value ... -Most :meth:`!__setattr__` implementations must modify -:meth:`self.__dict__ ` to store -local state for self without causing an infinite recursion. +Many :meth:`~object.__setattr__` implementations call :meth:`!object.__setattr__` to set +an attribute on self without causing infinite recursion:: + + class X: + def __setattr__(self, name, value): + # Custom logic here... + object.__setattr__(self, name, value) + +Alternatively, it is possible to set attributes by inserting +entries into :attr:`self.__dict__ ` directly. How do I call a method defined in a base class from a derived class that extends it? diff --git a/Doc/glossary.rst b/Doc/glossary.rst index b3fd3c96b5c217..c9d3eba66b07d9 100644 --- a/Doc/glossary.rst +++ b/Doc/glossary.rst @@ -347,7 +347,7 @@ Glossary docstring A string literal which appears as the first expression in a class, function or module. While ignored when the suite is executed, it is - recognized by the compiler and put into the :attr:`!__doc__` attribute + recognized by the compiler and put into the :attr:`~definition.__doc__` attribute of the enclosing class, function or module. 
Since it is available via introspection, it is the canonical place for documentation of the object. @@ -1241,7 +1241,7 @@ Glossary type The type of a Python object determines what kind of object it is; every object has a type. An object's type is accessible as its - :attr:`~instance.__class__` attribute or can be retrieved with + :attr:`~object.__class__` attribute or can be retrieved with ``type(obj)``. type alias diff --git a/Doc/howto/annotations.rst b/Doc/howto/annotations.rst index e9fc563f1b5880..78f3704ba5d000 100644 --- a/Doc/howto/annotations.rst +++ b/Doc/howto/annotations.rst @@ -107,9 +107,9 @@ Your code will have to have a separate code path if the object you're examining is a class (``isinstance(o, type)``). In that case, best practice relies on an implementation detail of Python 3.9 and before: if a class has annotations defined, -they are stored in the class's ``__dict__`` dictionary. Since +they are stored in the class's :attr:`~type.__dict__` dictionary. Since the class may or may not have annotations defined, best practice -is to call the ``get`` method on the class dict. +is to call the :meth:`~dict.get` method on the class dict. To put it all together, here is some sample code that safely accesses the ``__annotations__`` attribute on an arbitrary @@ -126,8 +126,8 @@ the type of ``ann`` using :func:`isinstance` before further examination. Note that some exotic or malformed type objects may not have -a ``__dict__`` attribute, so for extra safety you may also wish -to use :func:`getattr` to access ``__dict__``. +a :attr:`~type.__dict__` attribute, so for extra safety you may also wish +to use :func:`getattr` to access :attr:`!__dict__`. Manually Un-Stringizing Stringized Annotations @@ -247,4 +247,5 @@ on the class, you may observe unexpected behavior; see quirks by using :func:`annotationlib.get_annotations` on Python 3.14+ or :func:`inspect.get_annotations` on Python 3.10+. On earlier versions of Python, you can avoid these bugs by accessing the annotations from the -class's ``__dict__`` (e.g., ``cls.__dict__.get('__annotations__', None)``). +class's :attr:`~type.__dict__` +(e.g., ``cls.__dict__.get('__annotations__', None)``). diff --git a/Doc/howto/descriptor.rst b/Doc/howto/descriptor.rst index d1101648f9d8ae..01264bfe823746 100644 --- a/Doc/howto/descriptor.rst +++ b/Doc/howto/descriptor.rst @@ -562,8 +562,8 @@ attribute access. The expression ``obj.x`` looks up the attribute ``x`` in the chain of namespaces for ``obj``. If the search finds a descriptor outside of the -instance ``__dict__``, its :meth:`__get__` method is invoked according to the -precedence rules listed below. +instance :attr:`~object.__dict__`, its :meth:`~object.__get__` method is +invoked according to the precedence rules listed below. The details of invocation depend on whether ``obj`` is an object, class, or instance of super. diff --git a/Doc/howto/enum.rst b/Doc/howto/enum.rst index f406873226196b..66929b4104d8de 100644 --- a/Doc/howto/enum.rst +++ b/Doc/howto/enum.rst @@ -608,7 +608,7 @@ The solution is to specify the module name explicitly as follows:: the source, pickling will be disabled. The new pickle protocol 4 also, in some circumstances, relies on -:attr:`~definition.__qualname__` being set to the location where pickle will be able +:attr:`~type.__qualname__` being set to the location where pickle will be able to find the class. 
For example, if the class was made available in class SomeData in the global scope:: diff --git a/Doc/howto/free-threading-python.rst b/Doc/howto/free-threading-python.rst new file mode 100644 index 00000000000000..b21e3287ecaa3f --- /dev/null +++ b/Doc/howto/free-threading-python.rst @@ -0,0 +1,154 @@ +.. _freethreading-python-howto: + +********************************************** +Python experimental support for free threading +********************************************** + +Starting with the 3.13 release, CPython has experimental support for a build of +Python called :term:`free threading` where the :term:`global interpreter lock` +(GIL) is disabled. Free-threaded execution allows for full utilization of the +available processing power by running threads in parallel on available CPU cores. +While not all software will benefit from this automatically, programs +designed with threading in mind will run faster on multi-core hardware. + +**The free-threaded mode is experimental** and work is ongoing to improve it: +expect some bugs and a substantial single-threaded performance hit. + +This document describes the implications of free threading +for Python code. See :ref:`freethreading-extensions-howto` for information on +how to write C extensions that support the free-threaded build. + +.. seealso:: + + :pep:`703` – Making the Global Interpreter Lock Optional in CPython for an + overall description of free-threaded Python. + + +Installation +============ + +Starting with Python 3.13, the official macOS and Windows installers +optionally support installing free-threaded Python binaries. The installers +are available at https://www.python.org/downloads/. + +For information on other platforms, see the `Installing a Free-Threaded Python +`_, a +community-maintained installation guide for installing free-threaded Python. + +When building CPython from source, the :option:`--disable-gil` configure option +should be used to build a free-threaded Python interpreter. + + +Identifying free-threaded Python +================================ + +To check if the current interpreter supports free-threading, :option:`python -VV <-V>` +and :attr:`sys.version` contain "experimental free-threading build". +The new :func:`sys._is_gil_enabled` function can be used to check whether +the GIL is actually disabled in the running process. + +The ``sysconfig.get_config_var("Py_GIL_DISABLED")`` configuration variable can +be used to determine whether the build supports free threading. If the variable +is set to ``1``, then the build supports free threading. This is the recommended +mechanism for decisions related to the build configuration. + + +The global interpreter lock in free-threaded Python +=================================================== + +Free-threaded builds of CPython support optionally running with the GIL enabled +at runtime using the environment variable :envvar:`PYTHON_GIL` or +the command-line option :option:`-X gil`. + +The GIL may also automatically be enabled when importing a C-API extension +module that is not explicitly marked as supporting free threading. A warning +will be printed in this case. 
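As a minimal sketch of how a program might act on this at runtime, using only the APIs mentioned above (:func:`sys._is_gil_enabled` requires 3.13+; the ``getattr`` fallback is only a guard for older interpreters)::

    import sys
    import sysconfig

    if sysconfig.get_config_var("Py_GIL_DISABLED"):
        print("free-threaded build of CPython")
        if getattr(sys, "_is_gil_enabled", lambda: True)():
            # e.g. forced on by PYTHON_GIL=1 or by an extension module
            # that is not marked as supporting free threading
            print("the GIL is currently enabled")
        else:
            print("the GIL is disabled")
    else:
        print("standard GIL-enabled build")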
+
+In addition to individual package documentation, the following websites track
+the status of popular packages' support for free threading:
+
+* https://py-free-threading.github.io/tracking/
+* https://hugovk.github.io/free-threaded-wheels/
+
+
+Thread safety
+=============
+
+The free-threaded build of CPython aims to provide similar thread-safety
+behavior at the Python level to the default GIL-enabled build. Built-in
+types like :class:`dict`, :class:`list`, and :class:`set` use internal locks
+to protect against concurrent modifications in ways that behave similarly to
+the GIL. However, Python has not historically guaranteed specific behavior for
+concurrent modifications to these built-in types, so this should be treated
+as a description of the current implementation, not a guarantee of current or
+future behavior.
+
+.. note::
+
+   It's recommended to use the :class:`threading.Lock` or other synchronization
+   primitives instead of relying on the internal locks of built-in types, when
+   possible.
+
+
+Known limitations
+=================
+
+This section describes known limitations of the free-threaded CPython build.
+
+Immortalization
+---------------
+
+The free-threaded build of the 3.13 release makes some objects :term:`immortal`.
+Immortal objects are not deallocated and have reference counts that are
+never modified. This is done to avoid reference count contention that would
+prevent efficient multi-threaded scaling.
+
+An object will be made immortal when a new thread is started for the first time
+after the main thread is running. The following objects are immortalized:
+
+* :ref:`function ` objects declared at the module level
+* :ref:`method ` descriptors
+* :ref:`code ` objects
+* :term:`module` objects and their dictionaries
+* :ref:`classes ` (type objects)
+
+Because immortal objects are never deallocated, applications that create many
+objects of these types may see increased memory usage. This is expected to be
+addressed in the 3.14 release.
+
+Additionally, numeric and string literals in the code as well as strings
+returned by :func:`sys.intern` are also immortalized. This behavior is
+expected to remain in the 3.14 free-threaded build.
+
+
+Frame objects
+-------------
+
+It is not safe to access :ref:`frame ` objects from other
+threads and doing so may cause your program to crash. This means that
+:func:`sys._current_frames` is generally not safe to use in a free-threaded
+build. Functions like :func:`inspect.currentframe` and :func:`sys._getframe`
+are generally safe as long as the resulting frame object is not passed to
+another thread.
+
+Iterators
+---------
+
+Sharing the same iterator object between multiple threads is generally not
+safe and threads may see duplicate or missing elements when iterating or crash
+the interpreter.
+
+
+Single-threaded performance
+---------------------------
+
+The free-threaded build has additional overhead when executing Python code
+compared to the default GIL-enabled build. In 3.13, this overhead is about
+40% on the `pyperformance `_ suite.
+Programs that spend most of their time in C extensions or I/O will see
+less of an impact. The largest impact is because the specializing adaptive
+interpreter (:pep:`659`) is disabled in the free-threaded build. We expect
+to re-enable it in a thread-safe way in the 3.14 release. This overhead is
+expected to be reduced in upcoming Python releases. We are aiming for an
+overhead of 10% or less on the pyperformance suite compared to the default
+GIL-enabled build.
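To illustrate the "Thread safety" note above about preferring explicit synchronization primitives over the internal locks of built-in types, here is a minimal sketch; the names (``counts``, ``record``) are invented for the example::

    import threading

    counts = {}
    counts_lock = threading.Lock()

    def record(key):
        # A read-modify-write sequence like this is not atomic even with the
        # GIL, so an explicit lock states the guarantee clearly on both builds.
        with counts_lock:
            counts[key] = counts.get(key, 0) + 1

    threads = [threading.Thread(target=record, args=("hits",)) for _ in range(8)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    assert counts["hits"] == 8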
diff --git a/Doc/howto/index.rst b/Doc/howto/index.rst index a882f1747084fe..c09f92c9528ee1 100644 --- a/Doc/howto/index.rst +++ b/Doc/howto/index.rst @@ -32,6 +32,7 @@ Python Library Reference. isolating-extensions.rst timerfd.rst mro.rst + free-threading-python.rst free-threading-extensions.rst General: @@ -52,6 +53,7 @@ General: Advanced development: * :ref:`curses-howto` +* :ref:`freethreading-python-howto` * :ref:`freethreading-extensions-howto` * :ref:`isolating-extensions-howto` * :ref:`python_2.3_mro` diff --git a/Doc/howto/mro.rst b/Doc/howto/mro.rst index f44b4f98e570bd..46db516e16dae4 100644 --- a/Doc/howto/mro.rst +++ b/Doc/howto/mro.rst @@ -335,7 +335,7 @@ E is more specialized than C, even if it is in a higher level. A lazy programmer can obtain the MRO directly from Python 2.2, since in this case it coincides with the Python 2.3 linearization. It is enough -to invoke the .mro() method of class A: +to invoke the :meth:`~type.mro` method of class A: >>> A.mro() # doctest: +NORMALIZE_WHITESPACE [, , , diff --git a/Doc/howto/sorting.rst b/Doc/howto/sorting.rst index b98f91e023bdfc..70c34cde8a0659 100644 --- a/Doc/howto/sorting.rst +++ b/Doc/howto/sorting.rst @@ -47,11 +47,14 @@ lists. In contrast, the :func:`sorted` function accepts any iterable. Key Functions ============= -Both :meth:`list.sort` and :func:`sorted` have a *key* parameter to specify a -function (or other callable) to be called on each list element prior to making +The :meth:`list.sort` method and the functions :func:`sorted`, +:func:`min`, :func:`max`, :func:`heapq.nsmallest`, and +:func:`heapq.nlargest` have a *key* parameter to specify a function (or +other callable) to be called on each list element prior to making comparisons. -For example, here's a case-insensitive string comparison: +For example, here's a case-insensitive string comparison using +:meth:`str.casefold`: .. doctest:: @@ -272,6 +275,70 @@ to make it usable as a key function:: sorted(words, key=cmp_to_key(strcoll)) # locale-aware sort order +Strategies For Unorderable Types and Values +=========================================== + +A number of type and value issues can arise when sorting. +Here are some strategies that can help: + +* Convert non-comparable input types to strings prior to sorting: + +.. doctest:: + + >>> data = ['twelve', '11', 10] + >>> sorted(map(str, data)) + ['10', '11', 'twelve'] + +This is needed because most cross-type comparisons raise a +:exc:`TypeError`. + +* Remove special values prior to sorting: + +.. doctest:: + + >>> from math import isnan + >>> from itertools import filterfalse + >>> data = [3.3, float('nan'), 1.1, 2.2] + >>> sorted(filterfalse(isnan, data)) + [1.1, 2.2, 3.3] + +This is needed because the `IEEE-754 standard +`_ specifies that, "Every NaN +shall compare unordered with everything, including itself." + +Likewise, ``None`` can be stripped from datasets as well: + +.. doctest:: + + >>> data = [3.3, None, 1.1, 2.2] + >>> sorted(x for x in data if x is not None) + [1.1, 2.2, 3.3] + +This is needed because ``None`` is not comparable to other types. + +* Convert mapping types into sorted item lists before sorting: + +.. doctest:: + + >>> data = [{'a': 1}, {'b': 2}] + >>> sorted(data, key=lambda d: sorted(d.items())) + [{'a': 1}, {'b': 2}] + +This is needed because dict-to-dict comparisons raise a +:exc:`TypeError`. + +* Convert set types into sorted lists before sorting: + +.. 
doctest:: + + >>> data = [{'a', 'b', 'c'}, {'b', 'c', 'd'}] + >>> sorted(map(sorted, data)) + [['a', 'b', 'c'], ['b', 'c', 'd']] + +This is needed because the elements contained in set types do not have a +deterministic order. For example, ``list({'a', 'b'})`` may produce +either ``['a', 'b']`` or ``['b', 'a']``. + Odds and Ends ============= diff --git a/Doc/library/abc.rst b/Doc/library/abc.rst index 168ef3ec00d81b..38d744e97d087d 100644 --- a/Doc/library/abc.rst +++ b/Doc/library/abc.rst @@ -99,7 +99,7 @@ a helper class :class:`ABC` to alternatively define ABCs through inheritance: that you can customize the behavior of :func:`issubclass` further without the need to call :meth:`register` on every class you want to consider a subclass of the ABC. (This class method is called from the - :meth:`~class.__subclasscheck__` method of the ABC.) + :meth:`~type.__subclasscheck__` method of the ABC.) This method should return ``True``, ``False`` or :data:`NotImplemented`. If it returns ``True``, the *subclass* is considered a subclass of this ABC. @@ -149,7 +149,7 @@ a helper class :class:`ABC` to alternatively define ABCs through inheritance: The :meth:`__subclasshook__` class method defined here says that any class that has an :meth:`~iterator.__iter__` method in its :attr:`~object.__dict__` (or in that of one of its base classes, accessed - via the :attr:`~class.__mro__` list) is considered a ``MyIterable`` too. + via the :attr:`~type.__mro__` list) is considered a ``MyIterable`` too. Finally, the last line makes ``Foo`` a virtual subclass of ``MyIterable``, even though it does not define an :meth:`~iterator.__iter__` method (it uses diff --git a/Doc/library/annotationlib.rst b/Doc/library/annotationlib.rst index 1e72c5421674bc..37490456d13312 100644 --- a/Doc/library/annotationlib.rst +++ b/Doc/library/annotationlib.rst @@ -32,7 +32,7 @@ This module supports retrieving annotations in three main formats for annotations that cannot be resolved, allowing you to inspect the annotations without evaluating them. This is useful when you need to work with annotations that may contain unresolved forward references. -* :attr:`~Format.SOURCE` returns the annotations as a string, similar +* :attr:`~Format.STRING` returns the annotations as a string, similar to how it would appear in the source file. This is useful for documentation generators that want to display annotations in a readable way. @@ -135,7 +135,7 @@ Classes values. Real objects may contain references to, :class:`ForwardRef` proxy objects. - .. attribute:: SOURCE + .. attribute:: STRING :value: 3 Values are the text string of the annotation as it appears in the @@ -197,6 +197,27 @@ Classes Functions --------- +.. function:: annotations_to_string(annotations) + + Convert an annotations dict containing runtime values to a + dict containing only strings. If the values are not already strings, + they are converted using :func:`value_to_string`. + This is meant as a helper for user-provided + annotate functions that support the :attr:`~Format.STRING` format but + do not have access to the code creating the annotations. + + For example, this is used to implement the :attr:`~Format.STRING` for + :class:`typing.TypedDict` classes created through the functional syntax: + + .. doctest:: + + >>> from typing import TypedDict + >>> Movie = TypedDict("movie", {"name": str, "year": int}) + >>> get_annotations(Movie, format=Format.STRING) + {'name': 'str', 'year': 'int'} + + .. versionadded:: 3.14 + .. 
function:: call_annotate_function(annotate, format, *, owner=None) Call the :term:`annotate function` *annotate* with the given *format*, @@ -261,7 +282,7 @@ Functions NameError: name 'undefined' is not defined >>> call_evaluate_function(Alias.evaluate_value, Format.FORWARDREF) ForwardRef('undefined') - >>> call_evaluate_function(Alias.evaluate_value, Format.SOURCE) + >>> call_evaluate_function(Alias.evaluate_value, Format.STRING) 'undefined' .. versionadded:: 3.14 @@ -347,3 +368,18 @@ Functions {'a': , 'b': , 'return': } .. versionadded:: 3.14 + +.. function:: value_to_string(value) + + Convert an arbitrary Python value to a format suitable for use by the + :attr:`~Format.STRING` format. This calls :func:`repr` for most + objects, but has special handling for some objects, such as type objects. + + This is meant as a helper for user-provided + annotate functions that support the :attr:`~Format.STRING` format but + do not have access to the code creating the annotations. It can also + be used to provide a user-friendly string representation for other + objects that contain values that are commonly encountered in annotations. + + .. versionadded:: 3.14 + diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst index f22aeb8c13c605..a4683bccf651cd 100644 --- a/Doc/library/argparse.rst +++ b/Doc/library/argparse.rst @@ -25,6 +25,25 @@ will figure out how to parse those out of :data:`sys.argv`. The :mod:`argparse` module also automatically generates help and usage messages. The module will also issue errors when users give the program invalid arguments. +Quick Links for ArgumentParser +--------------------------------------- +========================= =========================================================================================================== ================================================================================== +Name Description Values +========================= =========================================================================================================== ================================================================================== +prog_ The name of the program Defaults to ``os.path.basename(sys.argv[0])`` +usage_ The string describing the program usage +description_ A brief description of what the program does +epilog_ Additional description of the program after the argument help +parents_ A list of :class:`ArgumentParser` objects whose arguments should also be included +formatter_class_ A class for customizing the help output ``argparse.HelpFormatter`` +prefix_chars_ The set of characters that prefix optional arguments Defaults to ``'-'`` +fromfile_prefix_chars_ The set of characters that prefix files to read additional arguments from Defaults to ``None`` (meaning arguments will never be treated as file references) +argument_default_ The global default value for arguments +allow_abbrev_ Allows long options to be abbreviated if the abbreviation is unambiguous ``True`` or ``False`` (default: ``True``) +conflict_handler_ The strategy for resolving conflicting optionals +add_help_ Add a ``-h/--help`` option to the parser ``True`` or ``False`` (default: ``True``) +exit_on_error_ Determines whether or not to exit with error info when an error occurs ``True`` or ``False`` (default: ``True``) +========================= =========================================================================================================== ================================================================================== Core Functionality ------------------ 
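To make the quick-links table above concrete, a short sketch combining several of the listed constructor parameters; the program name and arguments are invented for the example::

    import argparse

    parser = argparse.ArgumentParser(
        prog="frobnicate",            # defaults to os.path.basename(sys.argv[0])
        description="Frobnicate the given files.",
        epilog="Report bugs to the issue tracker.",
        allow_abbrev=False,           # options must be spelled out in full
        exit_on_error=False,          # raise ArgumentError instead of exiting
    )
    parser.add_argument("files", nargs="+")
    parser.add_argument("--verbose", action="store_true")
    print(parser.parse_args(["--verbose", "a.txt", "b.txt"]))
    # Namespace(files=['a.txt', 'b.txt'], verbose=True)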
@@ -1123,6 +1142,9 @@ is used when no command-line argument was present:: >>> parser.parse_args([]) Namespace(foo=42) +For required_ arguments, the ``default`` value is ignored. For example, this +applies to positional arguments with nargs_ values other than ``?`` or ``*``, +or optional arguments marked as ``required=True``. Providing ``default=argparse.SUPPRESS`` causes no attribute to be added if the command-line argument was not present:: @@ -1801,7 +1823,7 @@ Sub-commands >>> >>> # create the parser for the "b" command >>> parser_b = subparsers.add_parser('b', help='b help') - >>> parser_b.add_argument('--baz', choices='XYZ', help='baz help') + >>> parser_b.add_argument('--baz', choices=('X', 'Y', 'Z'), help='baz help') >>> >>> # parse some argument lists >>> parser.parse_args(['a', '12']) diff --git a/Doc/library/ast.rst b/Doc/library/ast.rst index 55007624c876fa..a9518859b83478 100644 --- a/Doc/library/ast.rst +++ b/Doc/library/ast.rst @@ -178,9 +178,9 @@ Root nodes A Python module, as with :ref:`file input `. Node type generated by :func:`ast.parse` in the default ``"exec"`` *mode*. - *body* is a :class:`list` of the module's :ref:`ast-statements`. + ``body`` is a :class:`list` of the module's :ref:`ast-statements`. - *type_ignores* is a :class:`list` of the module's type ignore comments; + ``type_ignores`` is a :class:`list` of the module's type ignore comments; see :func:`ast.parse` for more details. .. doctest:: @@ -199,7 +199,7 @@ Root nodes A single Python :ref:`expression input `. Node type generated by :func:`ast.parse` when *mode* is ``"eval"``. - *body* is a single node, + ``body`` is a single node, one of the :ref:`expression types `. .. doctest:: @@ -214,7 +214,7 @@ Root nodes A single :ref:`interactive input `, like in :ref:`tut-interac`. Node type generated by :func:`ast.parse` when *mode* is ``"single"``. - *body* is a :class:`list` of :ref:`statement nodes `. + ``body`` is a :class:`list` of :ref:`statement nodes `. .. doctest:: @@ -243,9 +243,9 @@ Root nodes # type: (int, int) -> int return a + b - *argtypes* is a :class:`list` of :ref:`expression nodes `. + ``argtypes`` is a :class:`list` of :ref:`expression nodes `. - *returns* is a single :ref:`expression node `. + ``returns`` is a single :ref:`expression node `. .. doctest:: @@ -1771,9 +1771,9 @@ aliases. .. class:: TypeVar(name, bound, default_value) - A :class:`typing.TypeVar`. *name* is the name of the type variable. - *bound* is the bound or constraints, if any. If *bound* is a :class:`Tuple`, - it represents constraints; otherwise it represents the bound. *default_value* + A :class:`typing.TypeVar`. ``name`` is the name of the type variable. + ``bound`` is the bound or constraints, if any. If ``bound`` is a :class:`Tuple`, + it represents constraints; otherwise it represents the bound. ``default_value`` is the default value; if the :class:`!TypeVar` has no default, this attribute will be set to ``None``. @@ -1801,8 +1801,8 @@ aliases. .. class:: ParamSpec(name, default_value) - A :class:`typing.ParamSpec`. *name* is the name of the parameter specification. - *default_value* is the default value; if the :class:`!ParamSpec` has no default, + A :class:`typing.ParamSpec`. ``name`` is the name of the parameter specification. + ``default_value`` is the default value; if the :class:`!ParamSpec` has no default, this attribute will be set to ``None``. .. doctest:: @@ -1836,8 +1836,8 @@ aliases. .. class:: TypeVarTuple(name, default_value) - A :class:`typing.TypeVarTuple`. 
*name* is the name of the type variable tuple. - *default_value* is the default value; if the :class:`!TypeVarTuple` has no + A :class:`typing.TypeVarTuple`. ``name`` is the name of the type variable tuple. + ``default_value`` is the default value; if the :class:`!TypeVarTuple` has no default, this attribute will be set to ``None``. .. doctest:: @@ -2491,7 +2491,7 @@ effects on the compilation of a program: differ in whitespace or similar details. Attributes include line numbers and column offsets. - .. versionadded:: 3.14 + .. versionadded:: next .. _ast-cli: diff --git a/Doc/library/asyncio-runner.rst b/Doc/library/asyncio-runner.rst index 8312e55126a7c5..28d5aaf3692baa 100644 --- a/Doc/library/asyncio-runner.rst +++ b/Doc/library/asyncio-runner.rst @@ -24,11 +24,13 @@ Running an asyncio Program .. function:: run(coro, *, debug=None, loop_factory=None) - Execute the :term:`coroutine` *coro* and return the result. + Execute *coro* in an asyncio event loop and return the result. - This function runs the passed coroutine, taking care of - managing the asyncio event loop, *finalizing asynchronous - generators*, and closing the executor. + The argument can be any awaitable object. + + This function runs the awaitable, taking care of managing the + asyncio event loop, *finalizing asynchronous generators*, and + closing the executor. This function cannot be called when another asyncio event loop is running in the same thread. @@ -70,6 +72,10 @@ Running an asyncio Program Added *loop_factory* parameter. + .. versionchanged:: 3.14 + + *coro* can be any awaitable object. + Runner context manager ====================== @@ -104,17 +110,25 @@ Runner context manager .. method:: run(coro, *, context=None) - Run a :term:`coroutine ` *coro* in the embedded loop. + Execute *coro* in the embedded event loop. + + The argument can be any awaitable object. - Return the coroutine's result or raise its exception. + If the argument is a coroutine, it is wrapped in a Task. An optional keyword-only *context* argument allows specifying a - custom :class:`contextvars.Context` for the *coro* to run in. - The runner's default context is used if ``None``. + custom :class:`contextvars.Context` for the code to run in. + The runner's default context is used if context is ``None``. + + Returns the awaitable's result or raises an exception. This function cannot be called when another asyncio event loop is running in the same thread. + .. versionchanged:: 3.14 + + *coro* can be any awaitable object. + .. method:: close() Close the runner. diff --git a/Doc/library/calendar.rst b/Doc/library/calendar.rst index d5876054da3eee..eafc038d6cb722 100644 --- a/Doc/library/calendar.rst +++ b/Doc/library/calendar.rst @@ -393,13 +393,22 @@ The :mod:`calendar` module exports the following data attributes: .. data:: day_name - An array that represents the days of the week in the current locale. + A sequence that represents the days of the week in the current locale, + where Monday is day number 0. + + >>> import calendar + >>> list(calendar.day_name) + ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday'] .. data:: day_abbr - An array that represents the abbreviated days of the week in the current locale. + A sequence that represents the abbreviated days of the week in the current locale, + where Mon is day number 0. + >>> import calendar + >>> list(calendar.day_abbr) + ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] .. 
data:: MONDAY TUESDAY @@ -426,17 +435,24 @@ The :mod:`calendar` module exports the following data attributes: .. data:: month_name - An array that represents the months of the year in the current locale. This + A sequence that represents the months of the year in the current locale. This follows normal convention of January being month number 1, so it has a length of 13 and ``month_name[0]`` is the empty string. + >>> import calendar + >>> list(calendar.month_name) + ['', 'January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December'] + .. data:: month_abbr - An array that represents the abbreviated months of the year in the current + A sequence that represents the abbreviated months of the year in the current locale. This follows normal convention of January being month number 1, so it has a length of 13 and ``month_abbr[0]`` is the empty string. + >>> import calendar + >>> list(calendar.month_abbr) + ['', 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] .. data:: JANUARY FEBRUARY diff --git a/Doc/library/collections.rst b/Doc/library/collections.rst index cee4e350c498fe..0cc9063f153aba 100644 --- a/Doc/library/collections.rst +++ b/Doc/library/collections.rst @@ -874,8 +874,8 @@ they add the ability to access fields by name instead of position index. ``(1, 2)``, then ``x`` will be a required argument, ``y`` will default to ``1``, and ``z`` will default to ``2``. - If *module* is defined, the ``__module__`` attribute of the named tuple is - set to that value. + If *module* is defined, the :attr:`~type.__module__` attribute of the + named tuple is set to that value. Named tuple instances do not have per-instance dictionaries, so they are lightweight and require no more memory than regular tuples. diff --git a/Doc/library/concurrent.futures.rst b/Doc/library/concurrent.futures.rst index e3b24451188cc4..ce72127127c7a6 100644 --- a/Doc/library/concurrent.futures.rst +++ b/Doc/library/concurrent.futures.rst @@ -286,14 +286,6 @@ to a :class:`ProcessPoolExecutor` will result in deadlock. Added the *initializer* and *initargs* arguments. - .. note:: - The default :mod:`multiprocessing` start method - (see :ref:`multiprocessing-start-methods`) will change away from - *fork* in Python 3.14. Code that requires *fork* be used for their - :class:`ProcessPoolExecutor` should explicitly specify that by - passing a ``mp_context=multiprocessing.get_context("fork")`` - parameter. - .. versionchanged:: 3.11 The *max_tasks_per_child* argument was added to allow users to control the lifetime of workers in the pool. @@ -310,6 +302,12 @@ to a :class:`ProcessPoolExecutor` will result in deadlock. *max_workers* uses :func:`os.process_cpu_count` by default, instead of :func:`os.cpu_count`. + .. versionchanged:: 3.14 + The default process start method (see + :ref:`multiprocessing-start-methods`) changed away from *fork*. If you + require the *fork* start method for :class:`ProcessPoolExecutor` you must + explicitly pass ``mp_context=multiprocessing.get_context("fork")``. + .. _processpoolexecutor-example: ProcessPoolExecutor Example diff --git a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst index a218304653aee9..535c5173be50de 100644 --- a/Doc/library/ctypes.rst +++ b/Doc/library/ctypes.rst @@ -2303,7 +2303,7 @@ These are the fundamental ctypes data types: Represents the C :c:expr:`double complex` datatype, if available. The constructor accepts an optional :class:`complex` initializer. - .. versionadded:: 3.14 + .. 
versionadded:: next .. class:: c_float_complex diff --git a/Doc/library/dataclasses.rst b/Doc/library/dataclasses.rst index cfca11afbd2e41..51c1a427b63787 100644 --- a/Doc/library/dataclasses.rst +++ b/Doc/library/dataclasses.rst @@ -187,13 +187,6 @@ Module contents If :attr:`!__slots__` is already defined in the class, then :exc:`TypeError` is raised. - .. warning:: - Calling no-arg :func:`super` in dataclasses using ``slots=True`` - will result in the following exception being raised: - ``TypeError: super(type, obj): obj must be an instance or subtype of type``. - The two-arg :func:`super` is a valid workaround. - See :gh:`90562` for full details. - .. warning:: Passing parameters to a base class :meth:`~object.__init_subclass__` when using ``slots=True`` will result in a :exc:`TypeError`. @@ -238,7 +231,7 @@ Module contents follows a field with a default value. This is true whether this occurs in a single class, or as a result of class inheritance. -.. function:: field(*, default=MISSING, default_factory=MISSING, init=True, repr=True, hash=None, compare=True, metadata=None, kw_only=MISSING) +.. function:: field(*, default=MISSING, default_factory=MISSING, init=True, repr=True, hash=None, compare=True, metadata=None, kw_only=MISSING, doc=None) For common and simple use cases, no other functionality is required. There are, however, some dataclass features that @@ -307,6 +300,10 @@ Module contents .. versionadded:: 3.10 + - ``doc``: optional docstring for this field. + + .. versionadded:: 3.13 + If the default value of a field is specified by a call to :func:`!field`, then the class attribute for this field will be replaced by the specified *default* value. If *default* is not diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst index 0e7dc4f262bab4..59e2dbd6847538 100644 --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -548,6 +548,39 @@ Other constructors, all class methods: .. versionadded:: 3.8 +.. classmethod:: date.strptime(date_string, format) + + Return a :class:`.date` corresponding to *date_string*, parsed according to + *format*. This is equivalent to:: + + date(*(time.strptime(date_string, format)[0:3])) + + :exc:`ValueError` is raised if the date_string and format + can't be parsed by :func:`time.strptime` or if it returns a value which isn't a + time tuple. See also :ref:`strftime-strptime-behavior` and + :meth:`date.fromisoformat`. + + .. note:: + + If *format* specifies a day of month without a year a + :exc:`DeprecationWarning` is emitted. This is to avoid a quadrennial + leap year bug in code seeking to parse only a month and day as the + default year used in absence of one in the format is not a leap year. + Such *format* values may raise an error as of Python 3.15. The + workaround is to always include a year in your *format*. If parsing + *date_string* values that do not have a year, explicitly add a year that + is a leap year before parsing: + + .. doctest:: + + >>> from datetime import date + >>> date_string = "02/29" + >>> when = date.strptime(f"{date_string};1984", "%m/%d;%Y") # Avoids leap year bug. + >>> when.strftime("%B %d") # doctest: +SKIP + 'February 29' + + .. versionadded:: 3.14 + Class attributes: @@ -1827,7 +1860,7 @@ In Boolean contexts, a :class:`.time` object is always considered to be true. details. -Other constructor: +Other constructors: .. classmethod:: time.fromisoformat(time_string) @@ -1869,6 +1902,22 @@ Other constructor: Previously, this method only supported formats that could be emitted by :meth:`time.isoformat`. 
+.. classmethod:: time.strptime(date_string, format) + + Return a :class:`.time` corresponding to *date_string*, parsed according to + *format*. + + If *format* does not contain microseconds or timezone information, this is equivalent to:: + + time(*(time.strptime(date_string, format)[3:6])) + + :exc:`ValueError` is raised if the *date_string* and *format* + cannot be parsed by :func:`time.strptime` or if it returns a value which is not a + time tuple. See also :ref:`strftime-strptime-behavior` and + :meth:`time.fromisoformat`. + + .. versionadded:: 3.14 + Instance methods: @@ -2367,24 +2416,22 @@ Class attributes: ``strftime(format)`` method, to create a string representing the time under the control of an explicit format string. -Conversely, the :meth:`datetime.strptime` class method creates a -:class:`.datetime` object from a string representing a date and time and a -corresponding format string. +Conversely, the :meth:`date.strptime`, :meth:`datetime.strptime` and +:meth:`time.strptime` class methods create an object from a string +representing the time and a corresponding format string. The table below provides a high-level comparison of :meth:`~.datetime.strftime` versus :meth:`~.datetime.strptime`: -+----------------+--------------------------------------------------------+------------------------------------------------------------------------------+ -| | ``strftime`` | ``strptime`` | -+================+========================================================+==============================================================================+ -| Usage | Convert object to a string according to a given format | Parse a string into a :class:`.datetime` object given a corresponding format | -+----------------+--------------------------------------------------------+------------------------------------------------------------------------------+ -| Type of method | Instance method | Class method | -+----------------+--------------------------------------------------------+------------------------------------------------------------------------------+ -| Method of | :class:`date`; :class:`.datetime`; :class:`.time` | :class:`.datetime` | -+----------------+--------------------------------------------------------+------------------------------------------------------------------------------+ -| Signature | ``strftime(format)`` | ``strptime(date_string, format)`` | -+----------------+--------------------------------------------------------+------------------------------------------------------------------------------+ ++----------------+--------------------------------------------------------+------------------------------------------------------------+ +| | ``strftime`` | ``strptime`` | ++================+========================================================+============================================================+ +| Usage | Convert object to a string according to a given format | Parse a string into an object given a corresponding format | ++----------------+--------------------------------------------------------+------------------------------------------------------------+ +| Type of method | Instance method | Class method | ++----------------+--------------------------------------------------------+------------------------------------------------------------+ +| Signature | ``strftime(format)`` | ``strptime(date_string, format)`` | ++----------------+--------------------------------------------------------+------------------------------------------------------------+ .. 
_format-codes: diff --git a/Doc/library/dis.rst b/Doc/library/dis.rst index cad73192f7cd43..e3919c2ffad84c 100644 --- a/Doc/library/dis.rst +++ b/Doc/library/dis.rst @@ -959,7 +959,7 @@ iterations of the loop. list of constants supported by this instruction. Used by the :keyword:`assert` statement to load :exc:`AssertionError`. - .. versionadded:: 3.14 + .. versionadded:: next .. opcode:: LOAD_BUILD_CLASS @@ -1826,7 +1826,7 @@ iterations of the loop. If ``type(STACK[-1]).__xxx__`` is not a method, leave ``STACK[-1].__xxx__; NULL`` on the stack. - .. versionadded:: 3.14 + .. versionadded:: next **Pseudo-instructions** @@ -1872,6 +1872,12 @@ but are replaced by real opcodes or removed before bytecode is generated. Undirected relative jump instructions which are replaced by their directed (forward/backward) counterparts by the assembler. +.. opcode:: JUMP_IF_TRUE +.. opcode:: JUMP_IF_FALSE + + Conditional jumps which do not impact the stack. Replaced by the sequence + ``COPY 1``, ``TO_BOOL``, ``POP_JUMP_IF_TRUE/FALSE``. + .. opcode:: LOAD_CLOSURE (i) Pushes a reference to the cell contained in slot ``i`` of the "fast locals" diff --git a/Doc/library/email.contentmanager.rst b/Doc/library/email.contentmanager.rst index 34121f8c0a7727..a86e227429b06d 100644 --- a/Doc/library/email.contentmanager.rst +++ b/Doc/library/email.contentmanager.rst @@ -58,11 +58,12 @@ * the type itself (``typ``) * the type's fully qualified name (``typ.__module__ + '.' + typ.__qualname__``). - * the type's qualname (``typ.__qualname__``) - * the type's name (``typ.__name__``). + * the type's :attr:`qualname ` (``typ.__qualname__``) + * the type's :attr:`name ` (``typ.__name__``). If none of the above match, repeat all of the checks above for each of - the types in the :term:`MRO` (``typ.__mro__``). Finally, if no other key + the types in the :term:`MRO` (:attr:`typ.__mro__ `). + Finally, if no other key yields a handler, check for a handler for the key ``None``. If there is no handler for ``None``, raise a :exc:`KeyError` for the fully qualified name of the type. diff --git a/Doc/library/email.headerregistry.rst b/Doc/library/email.headerregistry.rst index bcbd00c833e28e..7f8044932fae99 100644 --- a/Doc/library/email.headerregistry.rst +++ b/Doc/library/email.headerregistry.rst @@ -317,7 +317,7 @@ variant, :attr:`~.BaseHeader.max_count` is set to 1. class. When *use_default_map* is ``True`` (the default), the standard mapping of header names to classes is copied in to the registry during initialization. *base_class* is always the last class in the generated - class's ``__bases__`` list. + class's :class:`~type.__bases__` list. The default mappings are: diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst index b2b0086437f1db..a96f69e6170f00 100644 --- a/Doc/library/functions.rst +++ b/Doc/library/functions.rst @@ -283,9 +283,11 @@ are always available. They are listed here in alphabetical order. :func:`property`. .. versionchanged:: 3.10 - Class methods now inherit the method attributes (``__module__``, - ``__name__``, ``__qualname__``, ``__doc__`` and ``__annotations__``) and - have a new ``__wrapped__`` attribute. + Class methods now inherit the method attributes + (:attr:`~function.__module__`, :attr:`~function.__name__`, + :attr:`~function.__qualname__`, :attr:`~function.__doc__` and + :attr:`~function.__annotations__`) and have a new ``__wrapped__`` + attribute. .. 
deprecated-removed:: 3.11 3.13 Class methods can no longer wrap other :term:`descriptors ` such as @@ -1286,8 +1288,9 @@ are always available. They are listed here in alphabetical order. .. note:: - :class:`object` does *not* have a :attr:`~object.__dict__`, so you can't - assign arbitrary attributes to an instance of the :class:`object` class. + :class:`object` instances do *not* have :attr:`~object.__dict__` + attributes, so you can't assign arbitrary attributes to an instance of + :class:`object`. .. function:: oct(x) @@ -1907,10 +1910,11 @@ are always available. They are listed here in alphabetical order. For more information on static methods, see :ref:`types`. .. versionchanged:: 3.10 - Static methods now inherit the method attributes (``__module__``, - ``__name__``, ``__qualname__``, ``__doc__`` and ``__annotations__``), - have a new ``__wrapped__`` attribute, and are now callable as regular - functions. + Static methods now inherit the method attributes + (:attr:`~function.__module__`, :attr:`~function.__name__`, + :attr:`~function.__qualname__`, :attr:`~function.__doc__` and + :attr:`~function.__annotations__`), have a new ``__wrapped__`` attribute, + and are now callable as regular functions. .. index:: @@ -1961,11 +1965,11 @@ are always available. They are listed here in alphabetical order. to be searched. The search starts from the class right after the *type*. - For example, if :attr:`~class.__mro__` of *object_or_type* is + For example, if :attr:`~type.__mro__` of *object_or_type* is ``D -> B -> C -> A -> object`` and the value of *type* is ``B``, then :func:`super` searches ``C -> A -> object``. - The :attr:`~class.__mro__` attribute of the class corresponding to + The :attr:`~type.__mro__` attribute of the class corresponding to *object_or_type* lists the method resolution search order used by both :func:`getattr` and :func:`super`. The attribute is dynamic and can change whenever the inheritance hierarchy is updated. @@ -2044,28 +2048,30 @@ are always available. They are listed here in alphabetical order. With one argument, return the type of an *object*. The return value is a type object and generally the same object as returned by - :attr:`object.__class__ `. + :attr:`object.__class__`. The :func:`isinstance` built-in function is recommended for testing the type of an object, because it takes subclasses into account. - With three arguments, return a new type object. This is essentially a dynamic form of the :keyword:`class` statement. The *name* string is - the class name and becomes the :attr:`~definition.__name__` attribute. + the class name and becomes the :attr:`~type.__name__` attribute. The *bases* tuple contains the base classes and becomes the - :attr:`~class.__bases__` attribute; if empty, :class:`object`, the + :attr:`~type.__bases__` attribute; if empty, :class:`object`, the ultimate base of all classes, is added. The *dict* dictionary contains attribute and method definitions for the class body; it may be copied - or wrapped before becoming the :attr:`~object.__dict__` attribute. - The following two statements create identical :class:`type` objects: + or wrapped before becoming the :attr:`~type.__dict__` attribute. + The following two statements create identical :class:`!type` objects: >>> class X: ... a = 1 ... >>> X = type('X', (), dict(a=1)) - See also :ref:`bltin-type-objects`. + See also: + + * :ref:`Documentation on attributes and methods on classes `. 
+ * :ref:`bltin-type-objects` Keyword arguments provided to the three argument form are passed to the appropriate metaclass machinery (usually :meth:`~object.__init_subclass__`) @@ -2075,18 +2081,18 @@ are always available. They are listed here in alphabetical order. See also :ref:`class-customization`. .. versionchanged:: 3.6 - Subclasses of :class:`type` which don't override ``type.__new__`` may no + Subclasses of :class:`!type` which don't override ``type.__new__`` may no longer use the one-argument form to get the type of an object. .. function:: vars() vars(object) Return the :attr:`~object.__dict__` attribute for a module, class, instance, - or any other object with a :attr:`~object.__dict__` attribute. + or any other object with a :attr:`!__dict__` attribute. Objects such as modules and instances have an updateable :attr:`~object.__dict__` attribute; however, other objects may have write restrictions on their - :attr:`~object.__dict__` attributes (for example, classes use a + :attr:`!__dict__` attributes (for example, classes use a :class:`types.MappingProxyType` to prevent direct dictionary updates). Without an argument, :func:`vars` acts like :func:`locals`. diff --git a/Doc/library/functools.rst b/Doc/library/functools.rst index 008cde399baed2..46136def06dc05 100644 --- a/Doc/library/functools.rst +++ b/Doc/library/functools.rst @@ -328,6 +328,14 @@ The :mod:`functools` module defines the following functions: Returning ``NotImplemented`` from the underlying comparison function for unrecognised types is now supported. +.. data:: Placeholder + + A singleton object used as a sentinel to reserve a place + for positional arguments when calling :func:`partial` + and :func:`partialmethod`. + + .. versionadded:: 3.14 + .. function:: partial(func, /, *args, **keywords) Return a new :ref:`partial object` which when called @@ -338,26 +346,69 @@ The :mod:`functools` module defines the following functions: Roughly equivalent to:: def partial(func, /, *args, **keywords): - def newfunc(*fargs, **fkeywords): - newkeywords = {**keywords, **fkeywords} - return func(*args, *fargs, **newkeywords) + def newfunc(*more_args, **more_keywords): + keywords_union = {**keywords, **more_keywords} + return func(*args, *more_args, **keywords_union) newfunc.func = func newfunc.args = args newfunc.keywords = keywords return newfunc - The :func:`partial` is used for partial function application which "freezes" + The :func:`!partial` function is used for partial function application which "freezes" some portion of a function's arguments and/or keywords resulting in a new object with a simplified signature. For example, :func:`partial` can be used to create a callable that behaves like the :func:`int` function where the *base* argument - defaults to two: + defaults to ``2``: + + .. doctest:: - >>> from functools import partial >>> basetwo = partial(int, base=2) >>> basetwo.__doc__ = 'Convert base 2 string to an int.' >>> basetwo('10010') 18 + If :data:`Placeholder` sentinels are present in *args*, they will be filled first + when :func:`!partial` is called. This makes it possible to pre-fill any positional + argument with a call to :func:`!partial`; without :data:`!Placeholder`, only the + first positional argument can be pre-filled. + + If any :data:`!Placeholder` sentinels are present, all must be filled at call time: + + .. doctest:: + + >>> say_to_world = partial(print, Placeholder, Placeholder, "world!") + >>> say_to_world('Hello', 'dear') + Hello dear world! 
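(Editor's aside, not part of the patch: the placeholders above are filled from left to right, so the call shown expands as sketched below.)

.. code-block:: python

   say_to_world('Hello', 'dear')   # same as print('Hello', 'dear', 'world!')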
+ + Calling ``say_to_world('Hello')`` raises a :exc:`TypeError`, because + only one positional argument is provided, but there are two placeholders + that must be filled in. + + If :func:`!partial` is applied to an existing :func:`!partial` object, + :data:`!Placeholder` sentinels of the input object are filled in with + new positional arguments. + A placeholder can be retained by inserting a new + :data:`!Placeholder` sentinel to the place held by a previous :data:`!Placeholder`: + + .. doctest:: + + >>> from functools import partial, Placeholder as _ + >>> remove = partial(str.replace, _, _, '') + >>> message = 'Hello, dear dear world!' + >>> remove(message, ' dear') + 'Hello, world!' + >>> remove_dear = partial(remove, _, ' dear') + >>> remove_dear(message) + 'Hello, world!' + >>> remove_first_dear = partial(remove_dear, _, 1) + >>> remove_first_dear(message) + 'Hello, dear world!' + + :data:`!Placeholder` has no special treatment when used in a keyword + argument to :func:`!partial`. + + .. versionchanged:: 3.14 + Added support for :data:`Placeholder` in positional arguments. .. class:: partialmethod(func, /, *args, **keywords) @@ -492,6 +543,25 @@ The :mod:`functools` module defines the following functions: ... print(arg.real, arg.imag) ... + For code that dispatches on a collections type (e.g., ``list``), but wants + to typehint the items of the collection (e.g., ``list[int]``), the + dispatch type should be passed explicitly to the decorator itself with the + typehint going into the function definition:: + + >>> @fun.register(list) + ... def _(arg: list[int], verbose=False): + ... if verbose: + ... print("Enumerate this:") + ... for i, elem in enumerate(arg): + ... print(i, elem) + + .. note:: + + At runtime the function will dispatch on an instance of a list regardless + of the type contained within the list i.e. ``[1,2,3]`` will be + dispatched the same as ``["foo", "bar", "baz"]``. The annotation + provided in this example is for static type checkers only and has no + runtime impact. To enable registering :term:`lambdas` and pre-existing functions, the :func:`register` attribute can also be used in a functional form:: @@ -646,10 +716,11 @@ The :mod:`functools` module defines the following functions: attributes of the wrapper function are updated with the corresponding attributes from the original function. The default values for these arguments are the module level constants ``WRAPPER_ASSIGNMENTS`` (which assigns to the wrapper - function's ``__module__``, ``__name__``, ``__qualname__``, ``__annotations__``, - ``__type_params__``, and ``__doc__``, the documentation string) - and ``WRAPPER_UPDATES`` (which - updates the wrapper function's ``__dict__``, i.e. the instance dictionary). + function's :attr:`~function.__module__`, :attr:`~function.__name__`, + :attr:`~function.__qualname__`, :attr:`~function.__annotations__`, + :attr:`~function.__type_params__`, and :attr:`~function.__doc__`, the + documentation string) and ``WRAPPER_UPDATES`` (which updates the wrapper + function's :attr:`~function.__dict__`, i.e. the instance dictionary). To allow access to the original function for introspection and other purposes (e.g. bypassing a caching decorator such as :func:`lru_cache`), this function @@ -670,7 +741,7 @@ The :mod:`functools` module defines the following functions: .. versionchanged:: 3.2 The ``__wrapped__`` attribute is now automatically added. - The ``__annotations__`` attribute is now copied by default. 
+ The :attr:`~function.__annotations__` attribute is now copied by default. Missing attributes no longer trigger an :exc:`AttributeError`. .. versionchanged:: 3.4 @@ -679,7 +750,7 @@ The :mod:`functools` module defines the following functions: (see :issue:`17482`) .. versionchanged:: 3.12 - The ``__type_params__`` attribute is now copied by default. + The :attr:`~function.__type_params__` attribute is now copied by default. .. decorator:: wraps(wrapped, assigned=WRAPPER_ASSIGNMENTS, updated=WRAPPER_UPDATES) @@ -741,9 +812,7 @@ have three read-only attributes: The keyword arguments that will be supplied when the :class:`partial` object is called. -:class:`partial` objects are like :class:`function` objects in that they are +:class:`partial` objects are like :ref:`function objects ` in that they are callable, weak referenceable, and can have attributes. There are some important -differences. For instance, the :attr:`~definition.__name__` and :attr:`__doc__` attributes -are not created automatically. Also, :class:`partial` objects defined in -classes behave like static methods and do not transform into bound methods -during instance attribute look-up. +differences. For instance, the :attr:`~definition.__name__` and :attr:`~definition.__doc__` attributes +are not created automatically. diff --git a/Doc/library/importlib.rst b/Doc/library/importlib.rst index e4cef1f3e3b7c0..27d31f66b12495 100644 --- a/Doc/library/importlib.rst +++ b/Doc/library/importlib.rst @@ -1166,10 +1166,9 @@ find and load modules. .. class:: ModuleSpec(name, loader, *, origin=None, loader_state=None, is_package=None) A specification for a module's import-system-related state. This is - typically exposed as the module's :attr:`__spec__` attribute. In the - descriptions below, the names in parentheses give the corresponding - attribute available directly on the module object, - e.g. ``module.__spec__.origin == module.__file__``. Note, however, that + typically exposed as the module's :attr:`__spec__` attribute. Many + of these attributes are also available directly on a module: for example, + ``module.__spec__.origin == module.__file__``. Note, however, that while the *values* are usually equivalent, they can differ since there is no synchronization between the two objects. For example, it is possible to update the module's :attr:`__file__` at runtime and this will not be automatically @@ -1179,66 +1178,60 @@ find and load modules. .. attribute:: name - (:attr:`__name__`) - - The module's fully qualified name. - The :term:`finder` should always set this attribute to a non-empty string. + The module's fully qualified name + (see :attr:`__name__` attributes on modules). + The :term:`finder` should always set this attribute to a non-empty string. .. attribute:: loader - (:attr:`__loader__`) - - The :term:`loader` used to load the module. - The :term:`finder` should always set this attribute. + The :term:`loader` used to load the module + (see :attr:`__loader__` attributes on modules). + The :term:`finder` should always set this attribute. .. attribute:: origin - (:attr:`__file__`) - - The location the :term:`loader` should use to load the module. - For example, for modules loaded from a .py file this is the filename. - The :term:`finder` should always set this attribute to a meaningful value - for the :term:`loader` to use. In the uncommon case that there is not one - (like for namespace packages), it should be set to ``None``. 
+ The location the :term:`loader` should use to load the module + (see :attr:`__file__` attributes on modules). + For example, for modules loaded from a .py file this is the filename. + The :term:`finder` should always set this attribute to a meaningful value + for the :term:`loader` to use. In the uncommon case that there is not one + (like for namespace packages), it should be set to ``None``. .. attribute:: submodule_search_locations - (:attr:`__path__`) - - The list of locations where the package's submodules will be found. - Most of the time this is a single directory. - The :term:`finder` should set this attribute to a list, even an empty one, to indicate - to the import system that the module is a package. It should be set to ``None`` for - non-package modules. It is set automatically later to a special object for - namespace packages. + The list of locations where the package's submodules will be found + (see :attr:`__path__` attributes on modules). + Most of the time this is a single directory. + The :term:`finder` should set this attribute to a list, even an empty one, to indicate + to the import system that the module is a package. It should be set to ``None`` for + non-package modules. It is set automatically later to a special object for + namespace packages. .. attribute:: loader_state - The :term:`finder` may set this attribute to an object containing additional, - module-specific data to use when loading the module. Otherwise it should be - set to ``None``. + The :term:`finder` may set this attribute to an object containing additional, + module-specific data to use when loading the module. Otherwise it should be + set to ``None``. .. attribute:: cached - (:attr:`__cached__`) - - The filename of a compiled version of the module's code. - The :term:`finder` should always set this attribute but it may be ``None`` - for modules that do not need compiled code stored. + The filename of a compiled version of the module's code + (see :attr:`__cached__` attributes on modules). + The :term:`finder` should always set this attribute but it may be ``None`` + for modules that do not need compiled code stored. .. attribute:: parent - (:attr:`__package__`) - - (Read-only) The fully qualified name of the package the module is in (or the - empty string for a top-level module). - If the module is a package then this is the same as :attr:`name`. + (Read-only) The fully qualified name of the package the module is in (or the + empty string for a top-level module). + See :attr:`__package__` attributes on modules. + If the module is a package then this is the same as :attr:`name`. .. attribute:: has_location - ``True`` if the spec's :attr:`origin` refers to a loadable location, - ``False`` otherwise. This value impacts how :attr:`origin` is interpreted - and how the module's :attr:`__file__` is populated. + ``True`` if the spec's :attr:`origin` refers to a loadable location, + ``False`` otherwise. This value impacts how :attr:`origin` is interpreted + and how the module's :attr:`__file__` is populated. .. class:: AppleFrameworkLoader(name, path) diff --git a/Doc/library/inspect.rst b/Doc/library/inspect.rst index 57e5cf7ae023d1..853671856b2a14 100644 --- a/Doc/library/inspect.rst +++ b/Doc/library/inspect.rst @@ -545,7 +545,7 @@ attributes (see :ref:`import-mod-attrs` for module attributes): has a :meth:`~object.__get__` method, but not a :meth:`~object.__set__` method or a :meth:`~object.__delete__` method. Beyond that, the set of attributes varies. 
A :attr:`~definition.__name__` attribute is usually - sensible, and :attr:`!__doc__` often is. + sensible, and :attr:`~definition.__doc__` often is. Methods implemented via descriptors that also pass one of the other tests return ``False`` from the :func:`ismethoddescriptor` test, simply because the diff --git a/Doc/library/itertools.rst b/Doc/library/itertools.rst index 508c20f4df6f5e..9a62249816c9bf 100644 --- a/Doc/library/itertools.rst +++ b/Doc/library/itertools.rst @@ -58,7 +58,7 @@ Iterator Arguments Results :func:`compress` data, selectors (d[0] if s[0]), (d[1] if s[1]), ... ``compress('ABCDEF', [1,0,1,0,1,1]) → A C E F`` :func:`dropwhile` predicate, seq seq[n], seq[n+1], starting when predicate fails ``dropwhile(lambda x: x<5, [1,4,6,3,8]) → 6 3 8`` :func:`filterfalse` predicate, seq elements of seq where predicate(elem) fails ``filterfalse(lambda x: x<5, [1,4,6,3,8]) → 6 8`` -:func:`groupby` iterable[, key] sub-iterators grouped by value of key(v) ``groupby(['A','B','ABC'], len) → (1, A B) (3, ABC)`` +:func:`groupby` iterable[, key] sub-iterators grouped by value of key(v) ``groupby(['A','B','DEF'], len) → (1, A B) (3, DEF)`` :func:`islice` seq, [start,] stop [, step] elements from seq[start:stop:step] ``islice('ABCDEFG', 2, None) → C D E F G`` :func:`pairwise` iterable (p[0], p[1]), (p[1], p[2]) ``pairwise('ABCDEFG') → AB BC CD DE EF FG`` :func:`starmap` func, seq func(\*seq[0]), func(\*seq[1]), ... ``starmap(pow, [(2,5), (3,2), (10,3)]) → 32 9 1000`` @@ -93,7 +93,7 @@ Examples Results Itertool Functions ------------------ -The following module functions all construct and return iterators. Some provide +The following functions all construct and return iterators. Some provide streams of infinite length, so they should only be accessed by functions or loops that truncate the stream. @@ -131,11 +131,12 @@ loops that truncate the stream. total = function(total, element) yield total - The *function* argument can be set to :func:`min` for a running - minimum, :func:`max` for a running maximum, or :func:`operator.mul` - for a running product. `Amortization tables - `_ - can be built by accumulating interest and applying payments: + To compute a running minimum, set *function* to :func:`min`. + For a running maximum, set *function* to :func:`max`. + Or for a running product, set *function* to :func:`operator.mul`. + To build an `Amortization table + `_, + accumulate the interest and apply payments: .. doctest:: @@ -202,10 +203,10 @@ loops that truncate the stream. .. function:: chain(*iterables) - Make an iterator that returns elements from the first iterable until it is - exhausted, then proceeds to the next iterable, until all of the iterables are - exhausted. Used for treating consecutive sequences as a single sequence. - Roughly equivalent to:: + Make an iterator that returns elements from the first iterable until + it is exhausted, then proceeds to the next iterable, until all of the + iterables are exhausted. This combines multiple data sources into a + single iterator. Roughly equivalent to:: def chain(*iterables): # chain('ABC', 'DEF') → A B C D E F @@ -353,10 +354,12 @@ loops that truncate the stream. def cycle(iterable): # cycle('ABCD') → A B C D A B C D A B C D ... + saved = [] for element in iterable: yield element saved.append(element) + while saved: for element in saved: yield element @@ -396,8 +399,10 @@ loops that truncate the stream. 
def filterfalse(predicate, iterable): # filterfalse(lambda x: x<5, [1,4,6,3,8]) → 6 8 + if predicate is None: predicate = bool + for x in iterable: if not predicate(x): yield x @@ -474,7 +479,7 @@ loops that truncate the stream. If *start* is zero or ``None``, iteration starts at zero. Otherwise, elements from the iterable are skipped until *start* is reached. - If *stop* is ``None``, iteration continues until the iterable is + If *stop* is ``None``, iteration continues until the input is exhausted, if at all. Otherwise, it stops at the specified position. If *step* is ``None``, the step defaults to one. Elements are returned @@ -520,8 +525,10 @@ loops that truncate the stream. def pairwise(iterable): # pairwise('ABCDEFG') → AB BC CD DE EF FG + iterator = iter(iterable) a = next(iterator, None) + for b in iterator: yield a, b a = b @@ -584,7 +591,8 @@ loops that truncate the stream. .. function:: product(*iterables, repeat=1) - Cartesian product of input iterables. + `Cartesian product `_ + of the input iterables. Roughly equivalent to nested for-loops in a generator expression. For example, ``product(A, B)`` returns the same as ``((x,y) for x in A for y in B)``. @@ -691,25 +699,36 @@ loops that truncate the stream. def tee(iterable, n=2): if n < 0: - raise ValueError('n must be >= 0') - iterator = iter(iterable) - shared_link = [None, None] - return tuple(_tee(iterator, shared_link) for _ in range(n)) - - def _tee(iterator, link): - try: - while True: - if link[1] is None: - link[0] = next(iterator) - link[1] = [None, None] - value, link = link - yield value - except StopIteration: - return - - Once a :func:`tee` has been created, the original *iterable* should not be - used anywhere else; otherwise, the *iterable* could get advanced without - the tee objects being informed. + raise ValueError + if n == 0: + return () + iterator = _tee(iterable) + result = [iterator] + for _ in range(n - 1): + result.append(_tee(iterator)) + return tuple(result) + + class _tee: + + def __init__(self, iterable): + it = iter(iterable) + if isinstance(it, _tee): + self.iterator = it.iterator + self.link = it.link + else: + self.iterator = it + self.link = [None, None] + + def __iter__(self): + return self + + def __next__(self): + link = self.link + if link[1] is None: + link[0] = next(self.iterator) + link[1] = [None, None] + value, self.link = link + return value When the input *iterable* is already a tee iterator object, all members of the return tuple are constructed as if they had been diff --git a/Doc/library/logging.rst b/Doc/library/logging.rst index 6a67d6c75374af..235bcc281ac8f8 100644 --- a/Doc/library/logging.rst +++ b/Doc/library/logging.rst @@ -304,7 +304,8 @@ in a module, ``__name__`` is the module's name in the Python package namespace. parameter mirrors the equivalent one in the :mod:`warnings` module. The fourth keyword argument is *extra* which can be used to pass a - dictionary which is used to populate the __dict__ of the :class:`LogRecord` + dictionary which is used to populate the :attr:`~object.__dict__` of the + :class:`LogRecord` created for the logging event with user-defined attributes. These custom attributes can then be used as you like. For example, they could be incorporated into logged messages. For example:: diff --git a/Doc/library/multiprocessing.rst b/Doc/library/multiprocessing.rst index 80d6e4dae24463..036b8f44b9ff3b 100644 --- a/Doc/library/multiprocessing.rst +++ b/Doc/library/multiprocessing.rst @@ -124,11 +124,11 @@ to start a process. 
These *start methods* are inherited by the child process. Note that safely forking a multithreaded process is problematic. - Available on POSIX systems. Currently the default on POSIX except macOS. + Available on POSIX systems. - .. note:: - The default start method will change away from *fork* in Python 3.14. - Code that requires *fork* should explicitly specify that via + .. versionchanged:: 3.14 + This is no longer the default start method on any platform. + Code that requires *fork* must explicitly specify that via :func:`get_context` or :func:`set_start_method`. .. versionchanged:: 3.12 @@ -146,9 +146,11 @@ to start a process. These *start methods* are side-effect so it is generally safe for it to use :func:`os.fork`. No unnecessary resources are inherited. - Available on POSIX platforms which support passing file descriptors - over Unix pipes such as Linux. + Available on POSIX platforms which support passing file descriptors over + Unix pipes such as Linux. The default on those. + .. versionchanged:: 3.14 + This became the default start method on POSIX platforms. .. versionchanged:: 3.4 *spawn* added on all POSIX platforms, and *forkserver* added for @@ -162,6 +164,13 @@ to start a process. These *start methods* are method should be considered unsafe as it can lead to crashes of the subprocess as macOS system libraries may start threads. See :issue:`33725`. +.. versionchanged:: 3.14 + + On POSIX platforms the default start method was changed from *fork* to + *forkserver* to retain the performance but avoid common multithreaded + process incompatibilities. See :gh:`84559`. + + On POSIX using the *spawn* or *forkserver* start methods will also start a *resource tracker* process which tracks the unlinked named system resources (such as named semaphores or diff --git a/Doc/library/os.rst b/Doc/library/os.rst index cd7ae7bdd7385a..33dd58febd9a5e 100644 --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -4602,7 +4602,7 @@ written in Python, such as a mail server's external command delivery program. See the :manpage:`pidfd_open(2)` man page for more details. - .. availability:: Linux >= 5.3 + .. availability:: Linux >= 5.3, Android >= :func:`build-time ` API level 31 .. versionadded:: 3.9 .. data:: PIDFD_NONBLOCK diff --git a/Doc/library/pathlib.rst b/Doc/library/pathlib.rst index 4380122eb1be7d..30d0d385d0539c 100644 --- a/Doc/library/pathlib.rst +++ b/Doc/library/pathlib.rst @@ -1563,7 +1563,7 @@ Copying, moving and deleting This argument has no effect when copying files on Windows (where metadata is always preserved). - .. versionadded:: 3.14 + .. versionadded:: next .. method:: Path.copy_into(target_dir, *, follow_symlinks=True, \ @@ -1574,7 +1574,7 @@ Copying, moving and deleting :meth:`Path.copy`. Returns a new :class:`!Path` instance pointing to the copy. - .. versionadded:: 3.14 + .. versionadded:: next .. method:: Path.rename(target) diff --git a/Doc/library/pdb.rst b/Doc/library/pdb.rst index d696161876e99d..6c099b22b38c21 100644 --- a/Doc/library/pdb.rst +++ b/Doc/library/pdb.rst @@ -159,12 +159,15 @@ slightly different way: is entered. -.. function:: set_trace(*, header=None) +.. function:: set_trace(*, header=None, commands=None) Enter the debugger at the calling stack frame. This is useful to hard-code a breakpoint at a given point in a program, even if the code is not otherwise being debugged (e.g. when an assertion fails). If given, *header* is printed to the console just before debugging begins. 
+ The *commands* argument, if given, is a list of commands to execute + when the debugger starts. + .. versionchanged:: 3.7 The keyword-only argument *header*. @@ -173,6 +176,9 @@ slightly different way: :func:`set_trace` will enter the debugger immediately, rather than on the next line of code to be executed. + .. versionadded:: 3.14 + The *commands* argument. + .. function:: post_mortem(traceback=None) Enter post-mortem debugging of the given *traceback* object. If no @@ -192,7 +198,7 @@ The ``run*`` functions and :func:`set_trace` are aliases for instantiating the access further features, you have to do this yourself: .. class:: Pdb(completekey='tab', stdin=None, stdout=None, skip=None, \ - nosigint=False, readrc=True) + nosigint=False, readrc=True, mode=None) :class:`Pdb` is the debugger class. @@ -211,6 +217,13 @@ access further features, you have to do this yourself: The *readrc* argument defaults to true and controls whether Pdb will load .pdbrc files from the filesystem. + The *mode* argument specifies how the debugger was invoked. + It impacts the workings of some debugger commands. + Valid values are ``'inline'`` (used by the breakpoint() builtin), + ``'cli'`` (used by the command line invocation) + or ``None`` (for backwards compatible behaviour, as before the *mode* + argument was added). + Example call to enable tracing with *skip*:: import pdb; pdb.Pdb(skip=['django.*']).set_trace() @@ -227,6 +240,9 @@ access further features, you have to do this yourself: .. versionchanged:: 3.6 The *readrc* argument. + .. versionadded:: 3.14 + Added the *mode* argument. + .. method:: run(statement, globals=None, locals=None) runeval(expression, globals=None, locals=None) runcall(function, *args, **kwds) @@ -423,17 +439,20 @@ can be overridden by the local file. Specifying any command resuming execution (currently :pdbcmd:`continue`, :pdbcmd:`step`, :pdbcmd:`next`, - :pdbcmd:`return`, :pdbcmd:`jump`, :pdbcmd:`quit` and their abbreviations) + :pdbcmd:`return`, :pdbcmd:`until`, :pdbcmd:`jump`, :pdbcmd:`quit` and their abbreviations) terminates the command list (as if that command was immediately followed by end). This is because any time you resume execution (even with a simple next or step), you may encounter another breakpoint—which could have its own command list, leading to ambiguities about which list to execute. - If you use the ``silent`` command in the command list, the usual message about - stopping at a breakpoint is not printed. This may be desirable for breakpoints - that are to print a specific message and then continue. If none of the other - commands print anything, you see no sign that the breakpoint was reached. + If the list of commands contains the ``silent`` command, or a command that + resumes execution, then the breakpoint message containing information about + the frame is not displayed. + + .. versionchanged:: 3.14 + Frame information will not be displayed if a command that resumes execution + is present in the command list. .. pdbcommand:: s(tep) @@ -669,6 +688,10 @@ can be overridden by the local file. History, breakpoints, actions and debugger options are preserved. :pdbcmd:`restart` is an alias for :pdbcmd:`run`. + .. versionchanged:: 3.14 + :pdbcmd:`run` and :pdbcmd:`restart` commands are disabled when the + debugger is invoked in ``'inline'`` mode. + .. pdbcommand:: q(uit) Quit from the debugger. The program being executed is aborted. 
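(Editor's aside, not part of the patch: a sketch of how the new *commands* argument to ``pdb.set_trace`` described above might be used; the function and variable names are hypothetical.)

.. code-block:: python

   import pdb

   def checkout(order):
       # Pre-seed the debugger: print the order, then continue automatically.
       pdb.set_trace(header="entering checkout", commands=["p order", "c"])
       return order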
diff --git a/Doc/library/pydoc.rst b/Doc/library/pydoc.rst index f7ca1e045699eb..70e9c604ebac4f 100644 --- a/Doc/library/pydoc.rst +++ b/Doc/library/pydoc.rst @@ -21,7 +21,7 @@ modules. The documentation can be presented as pages of text on the console, served to a web browser, or saved to HTML files. For modules, classes, functions and methods, the displayed documentation is -derived from the docstring (i.e. the :attr:`!__doc__` attribute) of the object, +derived from the docstring (i.e. the :attr:`~definition.__doc__` attribute) of the object, and recursively of its documentable members. If there is no docstring, :mod:`!pydoc` tries to obtain a description from the block of comment lines just above the definition of the class, function or method in the source file, or at diff --git a/Doc/library/random.rst b/Doc/library/random.rst index c7f6b0bdd5b822..ef0cfb0e76cef6 100644 --- a/Doc/library/random.rst +++ b/Doc/library/random.rst @@ -741,7 +741,7 @@ The following options are accepted: .. option:: -f --float - Print a random floating-point number between 1 and N inclusive, + Print a random floating-point number between 0 and N inclusive, using :meth:`uniform`. If no options are given, the output depends on the input: diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst index e623c3df7beba6..d25701c087ed07 100644 --- a/Doc/library/shutil.rst +++ b/Doc/library/shutil.rst @@ -449,9 +449,10 @@ Directory and files operations *mode* is a permission mask passed to :func:`os.access`, by default determining if the file exists and is executable. - *path* is a "``PATH`` string" specifying the lookup directory list. When no - *path* is specified, the results of :func:`os.environ` are used, returning - either the "PATH" value or a fallback of :data:`os.defpath`. + *path* is a "``PATH`` string" specifying the directories to look in, + delimited by :data:`os.pathsep`. When no *path* is specified, the + :envvar:`PATH` environment variable is read from :data:`os.environ`, + falling back to :data:`os.defpath` if it is not set. On Windows, the current directory is prepended to the *path* if *mode* does not include ``os.X_OK``. When the *mode* does include ``os.X_OK``, the @@ -460,9 +461,9 @@ Directory and files operations consulting the current working directory for executables: set the environment variable ``NoDefaultCurrentDirectoryInExePath``. - Also on Windows, the ``PATHEXT`` variable is used to resolve commands - that may not already include an extension. For example, if you call - ``shutil.which("python")``, :func:`which` will search ``PATHEXT`` + Also on Windows, the :envvar:`PATHEXT` environment variable is used to + resolve commands that may not already include an extension. For example, + if you call ``shutil.which("python")``, :func:`which` will search ``PATHEXT`` to know that it should look for ``python.exe`` within the *path* directories. For example, on Windows:: diff --git a/Doc/library/signal.rst b/Doc/library/signal.rst index 79c4948e99e967..17fcb2b3707978 100644 --- a/Doc/library/signal.rst +++ b/Doc/library/signal.rst @@ -411,7 +411,7 @@ The :mod:`signal` module defines the following functions: See the :manpage:`pidfd_send_signal(2)` man page for more information. - .. availability:: Linux >= 5.1 + .. availability:: Linux >= 5.1, Android >= :func:`build-time ` API level 31 .. 
versionadded:: 3.9 diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst index 714507ce73c807..833c71c4ce4b9a 100644 --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -5521,22 +5521,6 @@ types, where they are relevant. Some of these are not reported by the :func:`dir` built-in function. -.. attribute:: object.__dict__ - - A dictionary or other mapping object used to store an object's (writable) - attributes. - - -.. attribute:: instance.__class__ - - The class to which a class instance belongs. - - -.. attribute:: class.__bases__ - - The tuple of base classes of a class object. - - .. attribute:: definition.__name__ The name of the class, function, method, descriptor, or @@ -5551,43 +5535,24 @@ types, where they are relevant. Some of these are not reported by the .. versionadded:: 3.3 -.. attribute:: definition.__type_params__ +.. attribute:: definition.__module__ - The :ref:`type parameters ` of generic classes, functions, - and :ref:`type aliases `. + The name of the module in which a class or function was defined. - .. versionadded:: 3.12 - - -.. attribute:: class.__mro__ - - This attribute is a tuple of classes that are considered when looking for - base classes during method resolution. - - -.. method:: class.mro() - This method can be overridden by a metaclass to customize the method - resolution order for its instances. It is called at class instantiation, and - its result is stored in :attr:`~class.__mro__`. +.. attribute:: definition.__doc__ + The documentation string of a class or function, or ``None`` if undefined. -.. method:: class.__subclasses__ - - Each class keeps a list of weak references to its immediate subclasses. This - method returns a list of all those references still alive. The list is in - definition order. Example:: - - >>> int.__subclasses__() - [, , , ] +.. attribute:: definition.__type_params__ -.. attribute:: class.__static_attributes__ + The :ref:`type parameters ` of generic classes, functions, + and :ref:`type aliases `. For classes and functions that + are not generic, this will be an empty tuple. - A tuple containing names of attributes of this class which are accessed - through ``self.X`` from any function in its body. + .. versionadded:: 3.12 - .. versionadded:: 3.13 .. _int_max_str_digits: diff --git a/Doc/library/string.rst b/Doc/library/string.rst index 1f316307965c11..57a1f920523035 100644 --- a/Doc/library/string.rst +++ b/Doc/library/string.rst @@ -574,11 +574,13 @@ The available presentation types for :class:`float` and | ``'%'`` | Percentage. Multiplies the number by 100 and displays | | | in fixed (``'f'``) format, followed by a percent sign. | +---------+----------------------------------------------------------+ - | None | For :class:`float` this is the same as ``'g'``, except | + | None | For :class:`float` this is like the ``'g'`` type, except | | | that when fixed-point notation is used to format the | | | result, it always includes at least one digit past the | - | | decimal point. The precision used is as large as needed | - | | to represent the given value faithfully. | + | | decimal point, and switches to the scientific notation | + | | when ``exp >= p - 1``. When the precision is not | + | | specified, the latter will be as large as needed to | + | | represent the given value faithfully. 
| | | | | | For :class:`~decimal.Decimal`, this is the same as | | | either ``'g'`` or ``'G'`` depending on the value of | diff --git a/Doc/library/symtable.rst b/Doc/library/symtable.rst index 8ebcb3bcf1b7b4..15e0b23aa12bf0 100644 --- a/Doc/library/symtable.rst +++ b/Doc/library/symtable.rst @@ -255,7 +255,7 @@ Examining Symbol Tables Return ``True`` if the symbol is a type parameter. - .. versionadded:: 3.14 + .. versionadded:: next .. method:: is_global() @@ -302,7 +302,7 @@ Examining Symbol Tables be free from the perspective of ``C.method``, thereby allowing the latter to return *1* at runtime and not *2*. - .. versionadded:: 3.14 + .. versionadded:: next .. method:: is_assigned() @@ -312,13 +312,13 @@ Examining Symbol Tables Return ``True`` if the symbol is a comprehension iteration variable. - .. versionadded:: 3.14 + .. versionadded:: next .. method:: is_comp_cell() Return ``True`` if the symbol is a cell in an inlined comprehension. - .. versionadded:: 3.14 + .. versionadded:: next .. method:: is_namespace() diff --git a/Doc/library/test.rst b/Doc/library/test.rst index 12f86043095598..04d28aee0f8672 100644 --- a/Doc/library/test.rst +++ b/Doc/library/test.rst @@ -946,7 +946,7 @@ The :mod:`test.support` module defines the following functions: other modules, possibly a C backend (like ``csv`` and its ``_csv``). The *extra* argument can be a set of names that wouldn't otherwise be automatically - detected as "public", like objects without a proper ``__module__`` + detected as "public", like objects without a proper :attr:`~definition.__module__` attribute. If provided, it will be added to the automatically detected ones. The *not_exported* argument can be a set of names that must not be treated diff --git a/Doc/library/types.rst b/Doc/library/types.rst index 116868c24be864..3c3c760c206ff2 100644 --- a/Doc/library/types.rst +++ b/Doc/library/types.rst @@ -91,8 +91,8 @@ Dynamic Type Creation For classes that have an ``__orig_bases__`` attribute, this function returns the value of ``cls.__orig_bases__``. - For classes without the ``__orig_bases__`` attribute, ``cls.__bases__`` is - returned. + For classes without the ``__orig_bases__`` attribute, + :attr:`cls.__bases__ ` is returned. Examples:: @@ -392,7 +392,7 @@ Standard names are defined for the following types: In addition, when a class is defined with a :attr:`~object.__slots__` attribute, then for each slot, an instance of :class:`!MemberDescriptorType` will be added as an attribute - on the class. This allows the slot to appear in the class's :attr:`~object.__dict__`. + on the class. This allows the slot to appear in the class's :attr:`~type.__dict__`. .. impl-detail:: diff --git a/Doc/library/typing.rst b/Doc/library/typing.rst index f52c593a086c0a..cd8b90854b0e94 100644 --- a/Doc/library/typing.rst +++ b/Doc/library/typing.rst @@ -2349,7 +2349,9 @@ types. Backward-compatible usage:: - # For creating a generic NamedTuple on Python 3.11 or lower + # For creating a generic NamedTuple on Python 3.11 + T = TypeVar("T") + class Group(NamedTuple, Generic[T]): key: T group: list[T] @@ -3269,7 +3271,8 @@ Introspection helpers empty dictionary is returned. * If *obj* is a class ``C``, the function returns a dictionary that merges annotations from ``C``'s base classes with those on ``C`` directly. This - is done by traversing ``C.__mro__`` and iteratively combining + is done by traversing :attr:`C.__mro__ ` and iteratively + combining ``__annotations__`` dictionaries. 
Annotations on classes appearing earlier in the :term:`method resolution order` always take precedence over annotations on classes appearing later in the method resolution order. @@ -3426,7 +3429,7 @@ Introspection helpers * Replaces type hints that evaluate to :const:`!None` with :class:`types.NoneType`. * Supports the :attr:`~annotationlib.Format.FORWARDREF` and - :attr:`~annotationlib.Format.SOURCE` formats. + :attr:`~annotationlib.Format.STRING` formats. *forward_ref* must be an instance of :class:`~annotationlib.ForwardRef`. *owner*, if given, should be the object that holds the annotations that diff --git a/Doc/library/unittest.mock.rst b/Doc/library/unittest.mock.rst index e15f8a4e903dc5..cc2b1b4299553c 100644 --- a/Doc/library/unittest.mock.rst +++ b/Doc/library/unittest.mock.rst @@ -239,7 +239,7 @@ the *new_callable* argument to :func:`patch`. Accessing any attribute not in this list will raise an :exc:`AttributeError`. If *spec* is an object (rather than a list of strings) then - :attr:`~instance.__class__` returns the class of the spec object. This + :attr:`~object.__class__` returns the class of the spec object. This allows mocks to pass :func:`isinstance` tests. * *spec_set*: A stricter variant of *spec*. If used, attempting to *set* @@ -401,6 +401,8 @@ the *new_callable* argument to :func:`patch`. The reset_mock method resets all the call attributes on a mock object: + .. doctest:: + >>> mock = Mock(return_value=None) >>> mock('hello') >>> mock.called @@ -409,20 +411,41 @@ the *new_callable* argument to :func:`patch`. >>> mock.called False - .. versionchanged:: 3.6 - Added two keyword-only arguments to the reset_mock function. - This can be useful where you want to make a series of assertions that - reuse the same object. Note that :meth:`reset_mock` *doesn't* clear the + reuse the same object. + + *return_value* parameter when set to ``True`` resets :attr:`return_value`: + + .. doctest:: + + >>> mock = Mock(return_value=5) + >>> mock('hello') + 5 + >>> mock.reset_mock(return_value=True) + >>> mock('hello') # doctest: +ELLIPSIS + + + *side_effect* parameter when set to ``True`` resets :attr:`side_effect`: + + .. doctest:: + + >>> mock = Mock(side_effect=ValueError) + >>> mock('hello') + Traceback (most recent call last): + ... + ValueError + >>> mock.reset_mock(side_effect=True) + >>> mock('hello') # doctest: +ELLIPSIS + + + Note that :meth:`reset_mock` *doesn't* clear the :attr:`return_value`, :attr:`side_effect` or any child attributes you have - set using normal assignment by default. In case you want to reset - :attr:`return_value` or :attr:`side_effect`, then pass the corresponding - parameter as ``True``. Child mocks and the return value mock - (if any) are reset as well. + set using normal assignment by default. - .. note:: *return_value*, and *side_effect* are keyword-only - arguments. + Child mocks are reset as well. + .. versionchanged:: 3.6 + Added two keyword-only arguments to the reset_mock function. .. method:: mock_add_spec(spec, spec_set=False) diff --git a/Doc/library/venv.rst b/Doc/library/venv.rst index cf6c5437be4fd1..e2c77963ff3040 100644 --- a/Doc/library/venv.rst +++ b/Doc/library/venv.rst @@ -37,14 +37,14 @@ A virtual environment is (amongst other things): are by default isolated from software in other virtual environments and Python interpreters and libraries installed in the operating system. 
-* Contained in a directory, conventionally either named ``venv`` or ``.venv`` in +* Contained in a directory, conventionally named ``.venv`` or ``venv`` in the project directory, or under a container directory for lots of virtual environments, such as ``~/.virtualenvs``. * Not checked into source control systems such as Git. * Considered as disposable -- it should be simple to delete and recreate it from - scratch. You don't place any project code in the environment + scratch. You don't place any project code in the environment. * Not considered as movable or copyable -- you just recreate the same environment in the target location. @@ -61,7 +61,127 @@ See :pep:`405` for more background on Python virtual environments. Creating virtual environments ----------------------------- -.. include:: /using/venv-create.inc +:ref:`Virtual environments ` are created by executing the ``venv`` +module: + +.. code-block:: shell + + python -m venv /path/to/new/virtual/environment + +This creates the target directory (including parent directories as needed) +and places a :file:`pyvenv.cfg` file in it with a ``home`` key +pointing to the Python installation from which the command was run. +It also creates a :file:`bin` (or :file:`Scripts` on Windows) subdirectory +containing a copy or symlink of the Python executable +(as appropriate for the platform or arguments used at environment creation time). +It also creates a :file:`lib/pythonX.Y/site-packages` subdirectory +(on Windows, this is :file:`Lib\site-packages`). +If an existing directory is specified, it will be re-used. + +.. versionchanged:: 3.5 + The use of ``venv`` is now recommended for creating virtual environments. + +.. deprecated-removed:: 3.6 3.8 + :program:`pyvenv` was the recommended tool for creating virtual environments + for Python 3.3 and 3.4, and replaced in 3.5 by executing ``venv`` directly. + +.. highlight:: none + +On Windows, invoke the ``venv`` command as follows: + +.. code-block:: ps1con + + PS> python -m venv C:\path\to\new\virtual\environment + +The command, if run with ``-h``, will show the available options:: + + usage: venv [-h] [--system-site-packages] [--symlinks | --copies] [--clear] + [--upgrade] [--without-pip] [--prompt PROMPT] [--upgrade-deps] + [--without-scm-ignore-files] + ENV_DIR [ENV_DIR ...] + + Creates virtual Python environments in one or more target directories. + + positional arguments: + ENV_DIR A directory to create the environment in. + + options: + -h, --help show this help message and exit + --system-site-packages + Give the virtual environment access to the system + site-packages dir. + --symlinks Try to use symlinks rather than copies, when + symlinks are not the default for the platform. + --copies Try to use copies rather than symlinks, even when + symlinks are the default for the platform. + --clear Delete the contents of the environment directory + if it already exists, before environment creation. + --upgrade Upgrade the environment directory to use this + version of Python, assuming Python has been + upgraded in-place. + --without-pip Skips installing or upgrading pip in the virtual + environment (pip is bootstrapped by default) + --prompt PROMPT Provides an alternative prompt prefix for this + environment. + --upgrade-deps Upgrade core dependencies (pip) to the latest + version in PyPI + --without-scm-ignore-files + Skips adding SCM ignore files to the environment + directory (Git is supported by default). + + Once an environment has been created, you may wish to activate it, e.g. 
by + sourcing an activate script in its bin directory. + + +.. versionchanged:: 3.4 + Installs pip by default, added the ``--without-pip`` and ``--copies`` + options. + +.. versionchanged:: 3.4 + In earlier versions, if the target directory already existed, an error was + raised, unless the ``--clear`` or ``--upgrade`` option was provided. + +.. versionchanged:: 3.9 + Add ``--upgrade-deps`` option to upgrade pip + setuptools to the latest on PyPI. + +.. versionchanged:: 3.12 + + ``setuptools`` is no longer a core venv dependency. + +.. versionchanged:: 3.13 + + Added the ``--without-scm-ignore-files`` option. +.. versionchanged:: 3.13 + ``venv`` now creates a :file:`.gitignore` file for Git by default. + +.. note:: + While symlinks are supported on Windows, they are not recommended. Of + particular note is that double-clicking ``python.exe`` in File Explorer + will resolve the symlink eagerly and ignore the virtual environment. + +.. note:: + On Microsoft Windows, it may be required to enable the ``Activate.ps1`` + script by setting the execution policy for the user. You can do this by + issuing the following PowerShell command: + + .. code-block:: powershell + + PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser + + See `About Execution Policies + `_ + for more information. + +The created :file:`pyvenv.cfg` file also includes the +``include-system-site-packages`` key, set to ``true`` if ``venv`` is +run with the ``--system-site-packages`` option, ``false`` otherwise. + +Unless the ``--without-pip`` option is given, :mod:`ensurepip` will be +invoked to bootstrap ``pip`` into the virtual environment. + +Multiple paths can be given to ``venv``, in which case an identical virtual +environment will be created, according to the given options, at each provided +path. .. _venv-explanation: @@ -117,7 +237,7 @@ should be runnable without activating it. In order to achieve this, scripts installed into virtual environments have a "shebang" line which points to the environment's Python interpreter, -i.e. :samp:`#!/{}/bin/python`. +:samp:`#!/{}/bin/python`. This means that the script will run with that interpreter regardless of the value of :envvar:`PATH`. On Windows, "shebang" line processing is supported if you have the :ref:`launcher` installed. Thus, double-clicking an installed @@ -168,31 +288,31 @@ creation according to their needs, the :class:`EnvBuilder` class. The :class:`EnvBuilder` class accepts the following keyword arguments on instantiation: - * ``system_site_packages`` -- a Boolean value indicating that the system Python + * *system_site_packages* -- a boolean value indicating that the system Python site-packages should be available to the environment (defaults to ``False``). - * ``clear`` -- a Boolean value which, if true, will delete the contents of + * *clear* -- a boolean value which, if true, will delete the contents of any existing target directory, before creating the environment. - * ``symlinks`` -- a Boolean value indicating whether to attempt to symlink the + * *symlinks* -- a boolean value indicating whether to attempt to symlink the Python binary rather than copying. - * ``upgrade`` -- a Boolean value which, if true, will upgrade an existing + * *upgrade* -- a boolean value which, if true, will upgrade an existing environment with the running Python - for use when that Python has been upgraded in-place (defaults to ``False``). 
- * ``with_pip`` -- a Boolean value which, if true, ensures pip is + * *with_pip* -- a boolean value which, if true, ensures pip is installed in the virtual environment. This uses :mod:`ensurepip` with the ``--default-pip`` option. - * ``prompt`` -- a String to be used after virtual environment is activated + * *prompt* -- a string to be used after virtual environment is activated (defaults to ``None`` which means directory name of the environment would be used). If the special string ``"."`` is provided, the basename of the current directory is used as the prompt. - * ``upgrade_deps`` -- Update the base venv modules to the latest on PyPI + * *upgrade_deps* -- Update the base venv modules to the latest on PyPI - * ``scm_ignore_files`` -- Create ignore files based for the specified source + * *scm_ignore_files* -- Create ignore files based for the specified source control managers (SCM) in the iterable. Support is defined by having a method named ``create_{scm}_ignore_file``. The only value supported by default is ``"git"`` via :meth:`create_git_ignore_file`. @@ -210,10 +330,7 @@ creation according to their needs, the :class:`EnvBuilder` class. .. versionchanged:: 3.13 Added the ``scm_ignore_files`` parameter - Creators of third-party virtual environment tools will be free to use the - provided :class:`EnvBuilder` class as a base class. - - The returned env-builder is an object which has a method, ``create``: + :class:`EnvBuilder` may be used as a base class. .. method:: create(env_dir) @@ -313,14 +430,14 @@ creation according to their needs, the :class:`EnvBuilder` class. .. method:: upgrade_dependencies(context) - Upgrades the core venv dependency packages (currently ``pip``) + Upgrades the core venv dependency packages (currently :pypi:`pip`) in the environment. This is done by shelling out to the ``pip`` executable in the environment. .. versionadded:: 3.9 .. versionchanged:: 3.12 - ``setuptools`` is no longer a core venv dependency. + :pypi:`setuptools` is no longer a core venv dependency. .. method:: post_setup(context) @@ -328,25 +445,15 @@ creation according to their needs, the :class:`EnvBuilder` class. implementations to pre-install packages in the virtual environment or perform other post-creation steps. - .. versionchanged:: 3.7.2 - Windows now uses redirector scripts for ``python[w].exe`` instead of - copying the actual binaries. In 3.7.2 only :meth:`setup_python` does - nothing unless running from a build in the source tree. - - .. versionchanged:: 3.7.3 - Windows copies the redirector scripts as part of :meth:`setup_python` - instead of :meth:`setup_scripts`. This was not the case in 3.7.2. - When using symlinks, the original executables will be linked. - - In addition, :class:`EnvBuilder` provides this utility method that can be - called from :meth:`setup_scripts` or :meth:`post_setup` in subclasses to - assist in installing custom scripts into the virtual environment. - .. method:: install_scripts(context, path) + This method can be + called from :meth:`setup_scripts` or :meth:`post_setup` in subclasses to + assist in installing custom scripts into the virtual environment. + *path* is the path to a directory that should contain subdirectories - "common", "posix", "nt", each containing scripts destined for the bin - directory in the environment. The contents of "common" and the + ``common``, ``posix``, ``nt``; each containing scripts destined for the + ``bin`` directory in the environment. 
The contents of ``common`` and the directory corresponding to :data:`os.name` are copied after some text replacement of placeholders: @@ -371,10 +478,20 @@ creation according to their needs, the :class:`EnvBuilder` class. .. method:: create_git_ignore_file(context) Creates a ``.gitignore`` file within the virtual environment that causes - the entire directory to be ignored by the ``git`` source control manager. + the entire directory to be ignored by the Git source control manager. .. versionadded:: 3.13 + .. versionchanged:: 3.7.2 + Windows now uses redirector scripts for ``python[w].exe`` instead of + copying the actual binaries. In 3.7.2 only :meth:`setup_python` does + nothing unless running from a build in the source tree. + + .. versionchanged:: 3.7.3 + Windows copies the redirector scripts as part of :meth:`setup_python` + instead of :meth:`setup_scripts`. This was not the case in 3.7.2. + When using symlinks, the original executables will be linked. + There is also a module-level convenience function: .. function:: create(env_dir, system_site_packages=False, clear=False, \ @@ -387,16 +504,16 @@ There is also a module-level convenience function: .. versionadded:: 3.3 .. versionchanged:: 3.4 - Added the ``with_pip`` parameter + Added the *with_pip* parameter .. versionchanged:: 3.6 - Added the ``prompt`` parameter + Added the *prompt* parameter .. versionchanged:: 3.9 - Added the ``upgrade_deps`` parameter + Added the *upgrade_deps* parameter .. versionchanged:: 3.13 - Added the ``scm_ignore_files`` parameter + Added the *scm_ignore_files* parameter An example of extending ``EnvBuilder`` -------------------------------------- diff --git a/Doc/library/warnings.rst b/Doc/library/warnings.rst index 68b9ff5ce2f78c..5ea65cbd8ca94c 100644 --- a/Doc/library/warnings.rst +++ b/Doc/library/warnings.rst @@ -597,6 +597,9 @@ Available Context Managers passed to :func:`simplefilter` as if it were called immediately on entering the context. + See :ref:`warning-filter` for the meaning of the *category* and *lineno* + parameters. + .. note:: The :class:`catch_warnings` manager works by replacing and diff --git a/Doc/reference/compound_stmts.rst b/Doc/reference/compound_stmts.rst index d31fbf87b739dc..1b1e9f479cbe08 100644 --- a/Doc/reference/compound_stmts.rst +++ b/Doc/reference/compound_stmts.rst @@ -1217,9 +1217,10 @@ A function definition defines a user-defined function object (see section : | `parameter_list_no_posonly` parameter_list_no_posonly: `defparameter` ("," `defparameter`)* ["," [`parameter_list_starargs`]] : | `parameter_list_starargs` - parameter_list_starargs: "*" [`parameter`] ("," `defparameter`)* ["," ["**" `parameter` [","]]] + parameter_list_starargs: "*" [`star_parameter`] ("," `defparameter`)* ["," ["**" `parameter` [","]]] : | "**" `parameter` [","] parameter: `identifier` [":" `expression`] + star_parameter: `identifier` [":" ["*"] `expression`] defparameter: `parameter` ["=" `expression`] funcname: `identifier` @@ -1326,11 +1327,16 @@ and may only be passed by positional arguments. Parameters may have an :term:`annotation ` of the form "``: expression``" following the parameter name. Any parameter may have an annotation, even those of the form -``*identifier`` or ``**identifier``. Functions may have "return" annotation of +``*identifier`` or ``**identifier``. (As a special case, parameters of the form +``*identifier`` may have an annotation "``: *expression``".) Functions may have "return" annotation of the form "``-> expression``" after the parameter list. 
These annotations can be any valid Python expression. The presence of annotations does not change the semantics of a function. See :ref:`annotations` for more information on annotations. +.. versionchanged:: 3.11 + Parameters of the form "``*identifier``" may have an annotation + "``: *expression``". See :pep:`646`. + .. index:: pair: lambda; expression It is also possible to create anonymous functions (functions not bound to a @@ -1416,7 +1422,7 @@ dictionary. The class name is bound to this class object in the original local namespace. The order in which attributes are defined in the class body is preserved -in the new class's ``__dict__``. Note that this is reliable only right +in the new class's :attr:`~type.__dict__`. Note that this is reliable only right after the class is created and only for classes that were defined using the definition syntax. @@ -1447,8 +1453,8 @@ decorators. The result is then bound to the class name. A list of :ref:`type parameters ` may be given in square brackets immediately after the class's name. This indicates to static type checkers that the class is generic. At runtime, -the type parameters can be retrieved from the class's ``__type_params__`` -attribute. See :ref:`generic-classes` for more. +the type parameters can be retrieved from the class's +:attr:`~type.__type_params__` attribute. See :ref:`generic-classes` for more. .. versionchanged:: 3.12 Type parameter lists are new in Python 3.12. @@ -1661,8 +1667,8 @@ with more precision. The scope of type parameters is modeled with a special function (technically, an :ref:`annotation scope `) that wraps the creation of the generic object. -Generic functions, classes, and type aliases have a :attr:`!__type_params__` -attribute listing their type parameters. +Generic functions, classes, and type aliases have a +:attr:`~definition.__type_params__` attribute listing their type parameters. Type parameters come in three kinds: @@ -1924,5 +1930,5 @@ all annotations are instead stored as strings:: therefore the function's :term:`docstring`. .. [#] A string literal appearing as the first statement in the class body is - transformed into the namespace's ``__doc__`` item and therefore the class's - :term:`docstring`. + transformed into the namespace's :attr:`~type.__doc__` item and therefore + the class's :term:`docstring`. diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst index 21aee0b6d0e3c5..513199d21456bf 100644 --- a/Doc/reference/datamodel.rst +++ b/Doc/reference/datamodel.rst @@ -595,7 +595,6 @@ Most of these attributes check the type of the assigned value: * - .. attribute:: function.__doc__ - The function's documentation string, or ``None`` if unavailable. - Not inherited by subclasses. * - .. attribute:: function.__name__ - The function's name. @@ -846,6 +845,7 @@ this case, the special read-only attribute :attr:`!__self__` is set to the objec denoted by *alist*. (The attribute has the same semantics as it does with :attr:`other instance methods `.) +.. _classes: Classes ^^^^^^^ @@ -942,6 +942,8 @@ namespace as a dictionary object. or keep the module around while using its dictionary directly. +.. _class-attrs-and-methods: + Custom classes -------------- @@ -984,6 +986,9 @@ of a base class. A class object can be called (see above) to yield a class instance (see below). +Special attributes +^^^^^^^^^^^^^^^^^^ + .. 
index:: single: __name__ (class attribute) single: __module__ (class attribute) @@ -996,66 +1001,121 @@ A class object can be called (see above) to yield a class instance (see below). single: __static_attributes__ (class attribute) single: __firstlineno__ (class attribute) -Special attributes: +.. list-table:: + :header-rows: 1 - :attr:`~definition.__name__` - The class name. + * - Attribute + - Meaning - :attr:`__module__` - The name of the module in which the class was defined. + * - .. attribute:: type.__name__ + - The class's name. + See also: :attr:`__name__ attributes `. - :attr:`~object.__dict__` - The dictionary containing the class's namespace. + * - .. attribute:: type.__qualname__ + - The class's :term:`qualified name`. + See also: :attr:`__qualname__ attributes `. - :attr:`~class.__bases__` - A tuple containing the base classes, in the order of - their occurrence in the base class list. + * - .. attribute:: type.__module__ + - The name of the module in which the class was defined. - :attr:`__doc__` - The class's documentation string, or ``None`` if undefined. + * - .. attribute:: type.__dict__ + - A :class:`mapping proxy ` + providing a read-only view of the class's namespace. + See also: :attr:`__dict__ attributes `. - :attr:`~object.__annotations__` - A dictionary containing - :term:`variable annotations ` - collected during class body execution. For best practices on - working with :attr:`~object.__annotations__`, please see - :mod:`annotationlib`. + * - .. attribute:: type.__bases__ + - A :class:`tuple` containing the class's bases. + In most cases, for a class defined as ``class X(A, B, C)``, + ``X.__bases__`` will be exactly equal to ``(A, B, C)``. - .. warning:: + * - .. attribute:: type.__doc__ + - The class's documentation string, or ``None`` if undefined. + Not inherited by subclasses. - Accessing the :attr:`~object.__annotations__` attribute of a class - object directly may yield incorrect results in the presence of - metaclasses. Use :func:`annotationlib.get_annotations` to - retrieve class annotations safely. + * - .. attribute:: type.__annotations__ + - A dictionary containing + :term:`variable annotations ` + collected during class body execution. See also: + :attr:`__annotations__ attributes `. - .. versionchanged:: 3.14 - Annotations are now :ref:`lazily evaluated `. - See :pep:`649`. + For best practices on working with :attr:`~object.__annotations__`, + please see :mod:`annotationlib`. - :attr:`~object.__annotate__` - The :term:`annotate function` for this class, or ``None`` - if the class has no annotations. See :attr:`object.__annotate__`. + .. caution:: - .. warning:: + Accessing the :attr:`!__annotations__` attribute of a class + object directly may yield incorrect results in the presence of + metaclasses. In addition, the attribute may not exist for + some classes. Use :func:`annotationlib.get_annotations` to + retrieve class annotations safely. - Accessing the :attr:`~object.__annotate__` attribute of a class - object directly may yield incorrect results in the presence of - metaclasses. Use :func:`annotationlib.get_annotate_function` to - retrieve the annotate function safely. + .. versionchanged:: 3.14 + Annotations are now :ref:`lazily evaluated `. + See :pep:`649`. - .. versionadded:: 3.14 + * - .. method:: type.__annotate__ + - The :term:`annotate function` for this class, or ``None`` + if the class has no annotations. + See also: :attr:`__annotate__ attributes `. 
- :attr:`__type_params__` - A tuple containing the :ref:`type parameters ` of - a :ref:`generic class `. + .. caution:: - :attr:`~class.__static_attributes__` - A tuple containing names of attributes of this class which are assigned - through ``self.X`` from any function in its body. + Accessing the :attr:`!__annotate__` attribute of a class + object directly may yield incorrect results in the presence of + metaclasses. Use :func:`annotationlib.get_annotate_function` to + retrieve the annotate function safely. - :attr:`__firstlineno__` - The line number of the first line of the class definition, including decorators. + .. versionadded:: 3.14 + + * - .. attribute:: type.__type_params__ + - A :class:`tuple` containing the :ref:`type parameters ` of + a :ref:`generic class `. + + .. versionadded:: 3.12 + + * - .. attribute:: type.__static_attributes__ + - A :class:`tuple` containing names of attributes of this class which are + assigned through ``self.X`` from any function in its body. + + .. versionadded:: 3.13 + + * - .. attribute:: type.__firstlineno__ + - The line number of the first line of the class definition, + including decorators. + Setting the :attr:`__module__` attribute removes the + :attr:`!__firstlineno__` item from the type's dictionary. + + .. versionadded:: 3.13 + + * - .. attribute:: type.__mro__ + - The :class:`tuple` of classes that are considered when looking for + base classes during method resolution. + + +Special methods +^^^^^^^^^^^^^^^ + +In addition to the special attributes described above, all Python classes also +have the following two methods available: + +.. method:: type.mro + + This method can be overridden by a metaclass to customize the method + resolution order for its instances. It is called at class instantiation, + and its result is stored in :attr:`~type.__mro__`. + +.. method:: type.__subclasses__ + + Each class keeps a list of weak references to its immediate subclasses. This + method returns a list of all those references still alive. The list is in + definition order. Example: + .. doctest:: + + >>> class A: pass + >>> class B(A): pass + >>> A.__subclasses__() + [] Class instances --------------- @@ -1095,12 +1155,22 @@ dictionary directly. Class instances can pretend to be numbers, sequences, or mappings if they have methods with certain special names. See section :ref:`specialnames`. +Special attributes +^^^^^^^^^^^^^^^^^^ + .. index:: single: __dict__ (instance attribute) single: __class__ (instance attribute) -Special attributes: :attr:`~object.__dict__` is the attribute dictionary; -:attr:`~instance.__class__` is the instance's class. +.. attribute:: object.__class__ + + The class to which a class instance belongs. + +.. attribute:: object.__dict__ + + A dictionary or other mapping object used to store an object's (writable) + attributes. Not all instances have a :attr:`!__dict__` attribute; see the + section on :ref:`slots` for more details. I/O objects (also known as file objects) @@ -2330,9 +2400,9 @@ Notes on using *__slots__*: * The action of a *__slots__* declaration is not limited to the class where it is defined. *__slots__* declared in parents are available in - child classes. However, child subclasses will get a :attr:`~object.__dict__` and - *__weakref__* unless they also define *__slots__* (which should only - contain names of any *additional* slots). + child classes. 
However, instances of a child subclass will get a + :attr:`~object.__dict__` and *__weakref__* unless the subclass also defines + *__slots__* (which should only contain names of any *additional* slots). * If a class defines a slot also defined in a base class, the instance variable defined by the base class slot is inaccessible (except by retrieving its @@ -2351,7 +2421,7 @@ Notes on using *__slots__*: to provide per-attribute docstrings that will be recognised by :func:`inspect.getdoc` and displayed in the output of :func:`help`. -* :attr:`~instance.__class__` assignment works only if both classes have the +* :attr:`~object.__class__` assignment works only if both classes have the same *__slots__*. * :ref:`Multiple inheritance ` with multiple slotted parent @@ -2617,7 +2687,7 @@ in the local namespace as the defined class. When a new class is created by ``type.__new__``, the object provided as the namespace parameter is copied to a new ordered mapping and the original object is discarded. The new copy is wrapped in a read-only proxy, which -becomes the :attr:`~object.__dict__` attribute of the class object. +becomes the :attr:`~type.__dict__` attribute of the class object. .. seealso:: @@ -2645,14 +2715,14 @@ order to allow the addition of Abstract Base Classes (ABCs) as "virtual base classes" to any class or type (including built-in types), including other ABCs. -.. method:: class.__instancecheck__(self, instance) +.. method:: type.__instancecheck__(self, instance) Return true if *instance* should be considered a (direct or indirect) instance of *class*. If defined, called to implement ``isinstance(instance, class)``. -.. method:: class.__subclasscheck__(self, subclass) +.. method:: type.__subclasscheck__(self, subclass) Return true if *subclass* should be considered a (direct or indirect) subclass of *class*. If defined, called to implement ``issubclass(subclass, @@ -2668,8 +2738,8 @@ case the instance is itself a class. :pep:`3119` - Introducing Abstract Base Classes Includes the specification for customizing :func:`isinstance` and - :func:`issubclass` behavior through :meth:`~class.__instancecheck__` and - :meth:`~class.__subclasscheck__`, with motivation for this functionality + :func:`issubclass` behavior through :meth:`~type.__instancecheck__` and + :meth:`~type.__subclasscheck__`, with motivation for this functionality in the context of adding Abstract Base Classes (see the :mod:`abc` module) to the language. diff --git a/Doc/reference/executionmodel.rst b/Doc/reference/executionmodel.rst index a02b5153ef0620..99cb09d09331d8 100644 --- a/Doc/reference/executionmodel.rst +++ b/Doc/reference/executionmodel.rst @@ -226,8 +226,8 @@ Annotation scopes differ from function scopes in the following ways: statements in inner scopes. This includes only type parameters, as no other syntactic elements that can appear within annotation scopes can introduce new names. * While annotation scopes have an internal name, that name is not reflected in the - :term:`__qualname__ ` of objects defined within the scope. - Instead, the :attr:`!__qualname__` + :term:`qualified name` of objects defined within the scope. + Instead, the :attr:`~definition.__qualname__` of such objects is as if the object were defined in the enclosing scope. .. 
versionadded:: 3.12 diff --git a/Doc/reference/expressions.rst b/Doc/reference/expressions.rst index 1ed715109ca5f7..ab72ad49d041e1 100644 --- a/Doc/reference/expressions.rst +++ b/Doc/reference/expressions.rst @@ -104,8 +104,8 @@ identifier is used but only the following private identifiers are mangled: - Any name used as the name of a variable that is assigned or read or any name of an attribute being accessed. - The ``__name__`` attribute of nested functions, classes, and type aliases - is however not mangled. + The :attr:`~definition.__name__` attribute of nested functions, classes, and + type aliases is however not mangled. - The name of imported modules, e.g., ``__spam`` in ``import __spam``. If the module is part of a package (i.e., its name contains a dot), @@ -284,7 +284,7 @@ A list display is a possibly empty series of expressions enclosed in square brackets: .. productionlist:: python-grammar - list_display: "[" [`starred_list` | `comprehension`] "]" + list_display: "[" [`flexible_expression_list` | `comprehension`] "]" A list display yields a new list object, the contents being specified by either a list of expressions or a comprehension. When a comma-separated list of @@ -309,7 +309,7 @@ A set display is denoted by curly braces and distinguishable from dictionary displays by the lack of colons separating keys and values: .. productionlist:: python-grammar - set_display: "{" (`starred_list` | `comprehension`) "}" + set_display: "{" (`flexible_expression_list` | `comprehension`) "}" A set display yields a new mutable set object, the contents being specified by either a sequence of expressions or a comprehension. When a comma-separated @@ -454,7 +454,7 @@ Yield expressions .. productionlist:: python-grammar yield_atom: "(" `yield_expression` ")" yield_from: "yield" "from" `expression` - yield_expression: "yield" `expression_list` | `yield_from` + yield_expression: "yield" `yield_list` | `yield_from` The yield expression is used when defining a :term:`generator` function or an :term:`asynchronous generator` function and @@ -485,9 +485,9 @@ When a generator function is called, it returns an iterator known as a generator. That generator then controls the execution of the generator function. The execution starts when one of the generator's methods is called. At that time, the execution proceeds to the first yield expression, where it is -suspended again, returning the value of :token:`~python-grammar:expression_list` +suspended again, returning the value of :token:`~python-grammar:yield_list` to the generator's caller, -or ``None`` if :token:`~python-grammar:expression_list` is omitted. +or ``None`` if :token:`~python-grammar:yield_list` is omitted. By suspended, we mean that all local state is retained, including the current bindings of local variables, the instruction pointer, the internal evaluation stack, and the state of any exception handling. @@ -576,7 +576,7 @@ is already executing raises a :exc:`ValueError` exception. :meth:`~generator.__next__` method, the current yield expression always evaluates to :const:`None`. The execution then continues to the next yield expression, where the generator is suspended again, and the value of the - :token:`~python-grammar:expression_list` is returned to :meth:`__next__`'s + :token:`~python-grammar:yield_list` is returned to :meth:`__next__`'s caller. If the generator exits without yielding another value, a :exc:`StopIteration` exception is raised. @@ -695,7 +695,7 @@ how a generator object would be used in a :keyword:`for` statement. 
Calling one of the asynchronous generator's methods returns an :term:`awaitable` object, and the execution starts when this object is awaited on. At that time, the execution proceeds to the first yield expression, where it is suspended -again, returning the value of :token:`~python-grammar:expression_list` to the +again, returning the value of :token:`~python-grammar:yield_list` to the awaiting coroutine. As with a generator, suspension means that all local state is retained, including the current bindings of local variables, the instruction pointer, the internal evaluation stack, and the state of any exception handling. @@ -759,7 +759,7 @@ which are used to control the execution of a generator function. asynchronous generator function is resumed with an :meth:`~agen.__anext__` method, the current yield expression always evaluates to :const:`None` in the returned awaitable, which when run will continue to the next yield - expression. The value of the :token:`~python-grammar:expression_list` of the + expression. The value of the :token:`~python-grammar:yield_list` of the yield expression is the value of the :exc:`StopIteration` exception raised by the completing coroutine. If the asynchronous generator exits without yielding another value, the awaitable instead raises a @@ -892,7 +892,7 @@ will generally select an element from the container. The subscription of a :ref:`GenericAlias ` object. .. productionlist:: python-grammar - subscription: `primary` "[" `expression_list` "]" + subscription: `primary` "[" `flexible_expression_list` "]" When an object is subscripted, the interpreter will evaluate the primary and the expression list. @@ -904,9 +904,13 @@ primary is subscripted, the evaluated result of the expression list will be passed to one of these methods. For more details on when ``__class_getitem__`` is called instead of ``__getitem__``, see :ref:`classgetitem-versus-getitem`. -If the expression list contains at least one comma, it will evaluate to a -:class:`tuple` containing the items of the expression list. Otherwise, the -expression list will evaluate to the value of the list's sole member. +If the expression list contains at least one comma, or if any of the expressions +are starred, the expression list will evaluate to a :class:`tuple` containing +the items of the expression list. Otherwise, the expression list will evaluate +to the value of the list's sole member. + +.. versionchanged:: 3.11 + Expressions in an expression list may be starred. See :pep:`646`. For built-in objects, there are two types of objects that support subscription via :meth:`~object.__getitem__`: @@ -1803,6 +1807,7 @@ returns a boolean value regardless of the type of its argument single: assignment expression single: walrus operator single: named expression + pair: assignment; expression Assignment expressions ====================== @@ -1905,10 +1910,12 @@ Expression lists single: , (comma); expression list .. 
productionlist:: python-grammar + starred_expression: ["*"] `or_expr` + flexible_expression: `assignment_expression` | `starred_expression` + flexible_expression_list: `flexible_expression` ("," `flexible_expression`)* [","] + starred_expression_list: `starred_expression` ("," `starred_expression`)* [","] expression_list: `expression` ("," `expression`)* [","] - starred_list: `starred_item` ("," `starred_item`)* [","] - starred_expression: `expression` | (`starred_item` ",")* [`starred_item`] - starred_item: `assignment_expression` | "*" `or_expr` + yield_list: `expression_list` | `starred_expression` "," [`starred_expression_list`] .. index:: pair: object; tuple @@ -1929,6 +1936,9 @@ the unpacking. .. versionadded:: 3.5 Iterable unpacking in expression lists, originally proposed by :pep:`448`. +.. versionadded:: 3.11 + Any item in an expression list may be starred. See :pep:`646`. + .. index:: pair: trailing; comma A trailing comma is required only to create a one-item tuple, diff --git a/Doc/reference/import.rst b/Doc/reference/import.rst index 19b8aa05072c73..0b9d1c233d182a 100644 --- a/Doc/reference/import.rst +++ b/Doc/reference/import.rst @@ -544,7 +544,7 @@ the module. It is **strongly** recommended that you rely on :attr:`__spec__` and its attributes instead of any of the other individual attributes -listed below. +listed below, except :attr:`__name__`. .. attribute:: __name__ diff --git a/Doc/tools/extensions/patchlevel.py b/Doc/tools/extensions/patchlevel.py index f2df6db47a2227..53ea1bf47b8fd3 100644 --- a/Doc/tools/extensions/patchlevel.py +++ b/Doc/tools/extensions/patchlevel.py @@ -74,4 +74,4 @@ def get_version_info(): if __name__ == "__main__": - print(format_version_info(get_header_version_info())[1]) + print(format_version_info(get_header_version_info())[0]) diff --git a/Doc/tools/extensions/pyspecific.py b/Doc/tools/extensions/pyspecific.py index 791d9296a975e7..c89b1693343b4e 100644 --- a/Doc/tools/extensions/pyspecific.py +++ b/Doc/tools/extensions/pyspecific.py @@ -259,7 +259,22 @@ def run(self): return PyMethod.run(self) -# Support for documenting version of removal in deprecations +# Support for documenting version of changes, additions, deprecations + +def expand_version_arg(argument, release): + """Expand "next" to the current version""" + if argument == 'next': + return sphinx_gettext('{} (unreleased)').format(release) + return argument + + +class PyVersionChange(VersionChange): + def run(self): + # Replace the 'next' special token with the current development version + self.arguments[0] = expand_version_arg(self.arguments[0], + self.config.release) + return super().run() + class DeprecatedRemoved(VersionChange): required_arguments = 2 @@ -270,8 +285,12 @@ class DeprecatedRemoved(VersionChange): def run(self): # Replace the first two arguments (deprecated version and removed version) # with a single tuple of both versions. 
- version_deprecated = self.arguments[0] + version_deprecated = expand_version_arg(self.arguments[0], + self.config.release) version_removed = self.arguments.pop(1) + if version_removed == 'next': + raise ValueError( + 'deprecated-removed:: second argument cannot be `next`') self.arguments[0] = version_deprecated, version_removed # Set the label based on if we have reached the removal version @@ -474,6 +493,10 @@ def setup(app): app.add_role('gh', gh_issue_role) app.add_directive('impl-detail', ImplementationDetail) app.add_directive('availability', Availability) + app.add_directive('versionadded', PyVersionChange, override=True) + app.add_directive('versionchanged', PyVersionChange, override=True) + app.add_directive('versionremoved', PyVersionChange, override=True) + app.add_directive('deprecated', PyVersionChange, override=True) app.add_directive('deprecated-removed', DeprecatedRemoved) app.add_builder(PydocTopicsBuilder) app.add_object_type('opcode', 'opcode', '%s (opcode)', parse_opcode_signature) diff --git a/Doc/tools/templates/download.html b/Doc/tools/templates/download.html index b4217908cc63c9..45ec436fee72d7 100644 --- a/Doc/tools/templates/download.html +++ b/Doc/tools/templates/download.html @@ -1,17 +1,19 @@ {% extends "layout.html" %} {% set title = _('Download') %} {% if daily is defined %} - {% set dlbase = pathto('archives', 1) %} + {% set dl_base = pathto('archives', resource=True) %} + {% set dl_version = version %} {% else %} {# The link below returns HTTP 404 until the first related alpha release. This is expected; use daily documentation builds for CPython development. #} - {% set dlbase = 'https://www.python.org/ftp/python/doc/' + release %} + {% set dl_base = 'https://www.python.org/ftp/python/doc/' + release %} + {% set dl_version = release %} {% endif %} {% block body %} -
{% trans %}Download Python {{ release }} Documentation{% endtrans %}
+{% trans %}Download Python {{ dl_version }} Documentation{% endtrans %}
 {% if last_updated %}
 {% trans %}Last updated on: {{ last_updated }}.{% endtrans %}
 {% endif %}
@@ -26,27 +28,27 @@
 {% trans %}Download Python {{ release }} Documentation{% endtrans %}
{% trans %}PDF{% endtrans %} - {% trans download_size="17" %}Download (ca. {{ download_size }} MiB){% endtrans %} - {% trans download_size="17" %}Download (ca. {{ download_size }} MiB){% endtrans %} + {% trans download_size="17" %}Download (ca. {{ download_size }} MiB){% endtrans %} + {% trans download_size="17" %}Download (ca. {{ download_size }} MiB){% endtrans %} {% trans %}HTML{% endtrans %} - {% trans download_size="13" %}Download (ca. {{ download_size }} MiB){% endtrans %} - {% trans download_size="8" %}Download (ca. {{ download_size }} MiB){% endtrans %} + {% trans download_size="13" %}Download (ca. {{ download_size }} MiB){% endtrans %} + {% trans download_size="8" %}Download (ca. {{ download_size }} MiB){% endtrans %} {% trans %}Plain text{% endtrans %} - {% trans download_size="4" %}Download (ca. {{ download_size }} MiB){% endtrans %} - {% trans download_size="3" %}Download (ca. {{ download_size }} MiB){% endtrans %} + {% trans download_size="4" %}Download (ca. {{ download_size }} MiB){% endtrans %} + {% trans download_size="3" %}Download (ca. {{ download_size }} MiB){% endtrans %} {% trans %}Texinfo{% endtrans %} - {% trans download_size="9" %}Download (ca. {{ download_size }} MiB){% endtrans %} - {% trans download_size="7" %}Download (ca. {{ download_size }} MiB){% endtrans %} + {% trans download_size="9" %}Download (ca. {{ download_size }} MiB){% endtrans %} + {% trans download_size="7" %}Download (ca. {{ download_size }} MiB){% endtrans %} {% trans %}EPUB{% endtrans %} - {% trans download_size="6" %}Download (ca. {{ download_size }} MiB){% endtrans %} + {% trans download_size="6" %}Download (ca. {{ download_size }} MiB){% endtrans %} diff --git a/Doc/tutorial/classes.rst b/Doc/tutorial/classes.rst index 675faa8c52477d..492568961d8a51 100644 --- a/Doc/tutorial/classes.rst +++ b/Doc/tutorial/classes.rst @@ -276,8 +276,8 @@ definition looked like this:: then ``MyClass.i`` and ``MyClass.f`` are valid attribute references, returning an integer and a function object, respectively. Class attributes can also be assigned to, so you can change the value of ``MyClass.i`` by assignment. -:attr:`!__doc__` is also a valid attribute, returning the docstring belonging to -the class: ``"A simple example class"``. +:attr:`~type.__doc__` is also a valid attribute, returning the docstring +belonging to the class: ``"A simple example class"``. Class *instantiation* uses function notation. Just pretend that the class object is a parameterless function that returns a new instance of the class. @@ -932,6 +932,6 @@ Examples:: .. [#] Except for one thing. Module objects have a secret read-only attribute called :attr:`~object.__dict__` which returns the dictionary used to implement the module's - namespace; the name :attr:`~object.__dict__` is an attribute but not a global name. + namespace; the name ``__dict__`` is an attribute but not a global name. Obviously, using this violates the abstraction of namespace implementation, and should be restricted to things like post-mortem debuggers. diff --git a/Doc/tutorial/controlflow.rst b/Doc/tutorial/controlflow.rst index 677d7ca02c3f2f..fd765e58ff2485 100644 --- a/Doc/tutorial/controlflow.rst +++ b/Doc/tutorial/controlflow.rst @@ -160,21 +160,59 @@ arguments. In chapter :ref:`tut-structures`, we will discuss in more detail abo .. 
_tut-break: -:keyword:`!break` and :keyword:`!continue` Statements, and :keyword:`!else` Clauses on Loops -============================================================================================ +:keyword:`!break` and :keyword:`!continue` Statements +===================================================== The :keyword:`break` statement breaks out of the innermost enclosing -:keyword:`for` or :keyword:`while` loop. +:keyword:`for` or :keyword:`while` loop:: -A :keyword:`!for` or :keyword:`!while` loop can include an :keyword:`!else` clause. + >>> for n in range(2, 10): + ... for x in range(2, n): + ... if n % x == 0: + ... print(f"{n} equals {x} * {n//x}") + ... break + ... + 4 equals 2 * 2 + 6 equals 2 * 3 + 8 equals 2 * 4 + 9 equals 3 * 3 + +The :keyword:`continue` statement continues with the next +iteration of the loop:: + + >>> for num in range(2, 10): + ... if num % 2 == 0: + ... print(f"Found an even number {num}") + ... continue + ... print(f"Found an odd number {num}") + ... + Found an even number 2 + Found an odd number 3 + Found an even number 4 + Found an odd number 5 + Found an even number 6 + Found an odd number 7 + Found an even number 8 + Found an odd number 9 + +.. _tut-for-else: + +:keyword:`!else` Clauses on Loops +================================= + +In a :keyword:`!for` or :keyword:`!while` loop the :keyword:`!break` statement +may be paired with an :keyword:`!else` clause. If the loop finishes without +executing the :keyword:`!break`, the :keyword:`!else` clause executes. In a :keyword:`for` loop, the :keyword:`!else` clause is executed -after the loop reaches its final iteration. +after the loop finishes its final iteration, that is, if no break occurred. In a :keyword:`while` loop, it's executed after the loop's condition becomes false. -In either kind of loop, the :keyword:`!else` clause is **not** executed -if the loop was terminated by a :keyword:`break`. +In either kind of loop, the :keyword:`!else` clause is **not** executed if the +loop was terminated by a :keyword:`break`. Of course, other ways of ending the +loop early, such as a :keyword:`return` or a raised exception, will also skip +execution of the :keyword:`else` clause. This is exemplified in the following :keyword:`!for` loop, which searches for prime numbers:: @@ -198,32 +236,19 @@ which searches for prime numbers:: 9 equals 3 * 3 (Yes, this is the correct code. Look closely: the ``else`` clause belongs to -the :keyword:`for` loop, **not** the :keyword:`if` statement.) - -When used with a loop, the ``else`` clause has more in common with the -``else`` clause of a :keyword:`try` statement than it does with that of -:keyword:`if` statements: a :keyword:`try` statement's ``else`` clause runs -when no exception occurs, and a loop's ``else`` clause runs when no ``break`` -occurs. For more on the :keyword:`!try` statement and exceptions, see -:ref:`tut-handling`. - -The :keyword:`continue` statement, also borrowed from C, continues with the next -iteration of the loop:: - - >>> for num in range(2, 10): - ... if num % 2 == 0: - ... print("Found an even number", num) - ... continue - ... print("Found an odd number", num) - ... - Found an even number 2 - Found an odd number 3 - Found an even number 4 - Found an odd number 5 - Found an even number 6 - Found an odd number 7 - Found an even number 8 - Found an odd number 9 +the ``for`` loop, **not** the ``if`` statement.) + +One way to think of the else clause is to imagine it paired with the ``if`` +inside the loop. 
As the loop executes, it will run a sequence like +if/if/if/else. The ``if`` is inside the loop, encountered a number of times. If +the condition is ever true, a ``break`` will happen. If the condition is never +true, the ``else`` clause outside the loop will execute. + +When used with a loop, the ``else`` clause has more in common with the ``else`` +clause of a :keyword:`try` statement than it does with that of ``if`` +statements: a ``try`` statement's ``else`` clause runs when no exception +occurs, and a loop's ``else`` clause runs when no ``break`` occurs. For more on +the ``try`` statement and exceptions, see :ref:`tut-handling`. .. _tut-pass: diff --git a/Doc/using/mac.rst b/Doc/using/mac.rst index 2dfac0758435d1..4b6c884f3d4f25 100644 --- a/Doc/using/mac.rst +++ b/Doc/using/mac.rst @@ -2,140 +2,223 @@ .. _using-on-mac: ********************* -Using Python on a Mac +Using Python on macOS ********************* -:Author: Bob Savage +.. sectionauthor:: Bob Savage +.. sectionauthor:: Ned Deily +This document aims to give an overview of macOS-specific behavior you should +know about to get started with Python on Mac computers. +Python on a Mac running macOS is very similar to Python on other Unix-derived platforms, +but there are some differences in installation and some features. -Python on a Mac running macOS is in principle very similar to Python on -any other Unix platform, but there are a number of additional features such as -the integrated development environment (IDE) and the Package Manager that are -worth pointing out. +There are various ways to obtain and install Python for macOS. +Pre-built versions of the most recent versions of Python are available +from a number of distributors. Much of this document describes use of +the Pythons provided by the CPython release team for download from +the `python.org website `_. See +:ref:`alternative_bundles` for some other options. +.. |usemac_x_dot_y| replace:: 3.13 +.. |usemac_python_x_dot_y_literal| replace:: ``python3.13`` +.. |usemac_python_x_dot_y_t_literal| replace:: ``python3.13t`` +.. |usemac_python_x_dot_y_t_literal_config| replace:: ``python3.13t-config`` +.. |usemac_applications_folder_name| replace:: ``Python 3.13`` +.. |usemac_applications_folder_version| replace:: ``/Applications/Python 3.13/`` .. _getting-osx: .. _getting-and-installing-macpython: -Getting and Installing Python -============================= +Using Python for macOS from ``python.org`` +========================================== -macOS used to come with Python 2.7 pre-installed between versions -10.8 and `12.3 `_. -You are invited to install the most recent version of Python 3 from the `Python -website `__. -A current "universal2 binary" build of Python, which runs natively on the Mac's -new Apple Silicon and legacy Intel processors, is available there. +Installation steps +------------------ -What you get after installing is a number of things: +For `current Python versions `_ +(other than those in ``security`` status), the release team produces a +**Python for macOS** installer package for each new release. +A list of available installers +is available `here `_. +We recommend using the most recent supported Python version where possible. +Current installers provide a +`universal2 binary `_ build +of Python which runs natively on all Macs (Apple Silicon and Intel) that are +supported by a wide range of macOS versions, +currently typically from at least **macOS 10.13 High Sierra** on. -* A |python_version_literal| folder in your :file:`Applications` folder. 
In here - you find IDLE, the development environment that is a standard part of official +The downloaded file is a standard macOS installer package file (``.pkg``). +File integrity information (checksum, size, sigstore signature, etc) for each file is included +on the release download page. Installer packages and their contents are signed and notarized +with ``Python Software Foundation`` Apple Developer ID certificates +to meet `macOS Gatekeeper requirements `_. + +For a default installation, double-click on the downloaded installer package file. +This should launch the standard macOS Installer app and display the first of several +installer windows steps. + +.. image:: mac_installer_01_introduction.png + +Clicking on the **Continue** button brings up the **Read Me** for this installer. +Besides other important information, the **Read Me** documents which Python version is +going to be installed and on what versions of macOS it is supported. You may need +to scroll through to read the whole file. By default, this **Read Me** will also be +installed in |usemac_applications_folder_version| and available to read anytime. + +.. image:: mac_installer_02_readme.png + +Clicking on **Continue** proceeds to display the license for Python and for +other included software. You will then need to **Agree** to the license terms +before proceeding to the next step. This license file will also be installed +and available to be read later. + +.. image:: mac_installer_03_license.png + +After the license terms are accepted, the next step is the **Installation Type** +display. For most uses, the standard set of installation operations is appropriate. + +.. image:: mac_installer_04_installation_type.png + +By pressing the **Customize** button, you can choose to omit or select certain package +components of the installer. Click on each package name to see a description of +what it installs. +To also install support for the optional experimental free-threaded feature, +see :ref:`install-freethreaded-macos`. + +.. image:: mac_installer_05_custom_install.png + +In either case, clicking **Install** will begin the install process by asking +permission to install new software. A macOS user name with ``Administrator`` privilege +is needed as the installed Python will be available to all users of the Mac. + +When the installation is complete, the **Summary** window will appear. + +.. image:: mac_installer_06_summary.png + +Double-click on the :command:`Install Certificates.command` +icon or file in the |usemac_applications_folder_version| window to complete the +installation. + +.. image:: mac_installer_07_applications.png + +This will open a temporary :program:`Terminal` shell window that +will use the new Python to download and install SSL root certificates +for its use. + +.. image:: mac_installer_08_install_certificates.png + +If ``Successfully installed certifi`` and ``update complete`` appears +in the terminal window, the installation is complete. +Close this terminal window and the installer window. + +A default install will include: + +* A |usemac_applications_folder_name| folder in your :file:`Applications` folder. In here + you find :program:`IDLE`, the development environment that is a standard part of official Python distributions; and :program:`Python Launcher`, which handles double-clicking Python - scripts from the Finder. + scripts from the macOS `Finder `_. * A framework :file:`/Library/Frameworks/Python.framework`, which includes the Python executable and libraries. 
The installer adds this location to your shell - path. To uninstall Python, you can remove these three things. A - symlink to the Python executable is placed in :file:`/usr/local/bin/`. + path. To uninstall Python, you can remove these three things. + Symlinks to the Python executable are placed in :file:`/usr/local/bin/`. .. note:: - On macOS 10.8-12.3, the Apple-provided build of Python is installed in - :file:`/System/Library/Frameworks/Python.framework` and :file:`/usr/bin/python`, - respectively. You should never modify or delete these, as they are - Apple-controlled and are used by Apple- or third-party software. Remember that - if you choose to install a newer Python version from python.org, you will have - two different but functional Python installations on your computer, so it will - be important that your paths and usages are consistent with what you want to do. - -IDLE includes a Help menu that allows you to access Python documentation. If you -are completely new to Python you should start reading the tutorial introduction -in that document. - -If you are familiar with Python on other Unix platforms you should read the -section on running Python scripts from the Unix shell. - + Recent versions of macOS include a :command:`python3` command in :file:`/usr/bin/python3` + that links to a usually older and incomplete version of Python provided by and for use by + the Apple development tools, :program:`Xcode` or the :program:`Command Line Tools for Xcode`. + You should never modify or attempt to delete this installation, as it is + Apple-controlled and is used by Apple-provided or third-party software. If + you choose to install a newer Python version from ``python.org``, you will have + two different but functional Python installations on your computer that + can co-exist. The default installer options should ensure that its :command:`python3` + will be used instead of the system :command:`python3`. How to run a Python script -------------------------- -Your best way to get started with Python on macOS is through the IDLE -integrated development environment; see section :ref:`ide` and use the Help menu -when the IDE is running. +There are two ways to invoke the Python interpreter. +If you are familiar with using a Unix shell in a terminal +window, you can invoke |usemac_python_x_dot_y_literal| or ``python3`` optionally +followed by one or more command line options (described in :ref:`using-on-general`). +The Python tutorial also has a useful section on +:ref:`using Python interactively from a shell `. + +You can also invoke the interpreter through an integrated +development environment. +:ref:`idle` is a basic editor and interpreter environment +which is included with the standard distribution of Python. +:program:`IDLE` includes a Help menu that allows you to access Python documentation. If you +are completely new to Python, you can read the tutorial introduction +in that document. -If you want to run Python scripts from the Terminal window command line or from -the Finder you first need an editor to create your script. macOS comes with a -number of standard Unix command line editors, :program:`vim` -:program:`nano` among them. If you want a more Mac-like editor, -:program:`BBEdit` from Bare Bones Software (see -https://www.barebones.com/products/bbedit/index.html) are good choices, as is -:program:`TextMate` (see https://macromates.com). Other editors include -:program:`MacVim` (https://macvim.org) and :program:`Aquamacs` -(https://aquamacs.org). 
+There are many other editors and IDEs available, see :ref:`editors` +for more information. -To run your script from the Terminal window you must make sure that -:file:`/usr/local/bin` is in your shell search path. +To run a Python script file from the terminal window, you can +invoke the interpreter with the name of the script file: -To run your script from the Finder you have two options: + |usemac_python_x_dot_y_literal| ``myscript.py`` + +To run your script from the Finder, you can either: * Drag it to :program:`Python Launcher`. * Select :program:`Python Launcher` as the default application to open your - script (or any ``.py`` script) through the finder Info window and double-click it. + script (or any ``.py`` script) through the Finder Info window and double-click it. :program:`Python Launcher` has various preferences to control how your script is launched. Option-dragging allows you to change these for one invocation, or use - its Preferences menu to change things globally. - - -.. _osx-gui-scripts: - -Running scripts with a GUI --------------------------- - -With older versions of Python, there is one macOS quirk that you need to be -aware of: programs that talk to the Aqua window manager (in other words, -anything that has a GUI) need to be run in a special way. Use :program:`pythonw` -instead of :program:`python` to start such scripts. + its ``Preferences`` menu to change things globally. -With Python 3.9, you can use either :program:`python` or :program:`pythonw`. +Be aware that running the script directly from the macOS Finder might +produce different results than when running from a terminal window as +the script will not be run in the usual shell environment including +any setting of environment variables in shell profiles. +And, as with any other script or program, +be certain of what you are about to run. +.. _alternative_bundles: -Configuration -------------- +Alternative Distributions +========================= -Python on macOS honors all standard Unix environment variables such as -:envvar:`PYTHONPATH`, but setting these variables for programs started from the -Finder is non-standard as the Finder does not read your :file:`.profile` or -:file:`.cshrc` at startup. You need to create a file -:file:`~/.MacOSX/environment.plist`. See Apple's -`Technical Q&A QA1067 `__ -for details. +Besides the standard ``python.org`` for macOS installer, there are third-party +distributions for macOS that may include additional functionality. +Some popular distributions and their key features: -For more information on installation Python packages, see section -:ref:`mac-package-manager`. +`ActivePython `_ + Installer with multi-platform compatibility, documentation +`Anaconda `_ + Popular scientific modules (such as numpy, scipy, and pandas) and the + ``conda`` package manager. -.. _ide: +`Homebrew `_ + Package manager for macOS including multiple versions of Python and many + third-party Python-based packages (including numpy, scipy, and pandas). -The IDE -======= - -Python ships with the standard IDLE development environment. A good -introduction to using IDLE can be found at -https://www.hashcollision.org/hkn/python/idle_intro/index.html. +`MacPorts `_ + Another package manager for macOS including multiple versions of Python and many + third-party Python-based packages. May include pre-built versions of Python and + many packages for older versions of macOS. 
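When several of these distributions are installed side by side, it can be
unclear which interpreter a given command actually resolves to. The following
minimal Python sketch (purely illustrative, not part of any installer) prints
the running interpreter's version, location, and standard-library path:

.. code-block:: python

   # Identify which Python interpreter is actually running.
   import sys
   import sysconfig

   print(sys.version)                    # full version string of this interpreter
   print(sys.executable)                 # path to the interpreter binary in use
   print(sysconfig.get_path("stdlib"))   # location of its standard library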
+Note that distributions might not include the latest versions of Python or +other libraries, and are not maintained or supported by the core Python team. .. _mac-package-manager: Installing Additional Python Packages ===================================== -This section has moved to the `Python Packaging User Guide`_. +Refer to the `Python Packaging User Guide`_ for more information. .. _Python Packaging User Guide: https://packaging.python.org/en/latest/tutorials/installing-packages/ +.. _osx-gui-scripts: + .. _gui-programming-on-the-mac: GUI Programming @@ -143,36 +226,209 @@ GUI Programming There are several options for building GUI applications on the Mac with Python. -*PyObjC* is a Python binding to Apple's Objective-C/Cocoa framework, which is -the foundation of most modern Mac development. Information on PyObjC is -available from :pypi:`pyobjc`. - The standard Python GUI toolkit is :mod:`tkinter`, based on the cross-platform -Tk toolkit (https://www.tcl.tk). An Aqua-native version of Tk is bundled with -macOS by Apple, and the latest version can be downloaded and installed from -https://www.activestate.com; it can also be built from source. +Tk toolkit (https://www.tcl.tk). A macOS-native version of Tk is included with +the installer. + +*PyObjC* is a Python binding to Apple's Objective-C/Cocoa framework. +Information on PyObjC is available from :pypi:`pyobjc`. -A number of alternative macOS GUI toolkits are available: +A number of alternative macOS GUI toolkits are available including: -* `PySide `__: Official Python bindings to the - `Qt GUI toolkit `__. +* `PySide `_: Official Python bindings to the + `Qt GUI toolkit `_. -* `PyQt `__: Alternative +* `PyQt `_: Alternative Python bindings to Qt. -* `Kivy `__: A cross-platform GUI toolkit that supports +* `Kivy `_: A cross-platform GUI toolkit that supports desktop and mobile platforms. -* `Toga `__: Part of the `BeeWare Project - `__; supports desktop, mobile, web and console apps. +* `Toga `_: Part of the `BeeWare Project + `_; supports desktop, mobile, web and console apps. -* `wxPython `__: A cross-platform toolkit that +* `wxPython `_: A cross-platform toolkit that supports desktop operating systems. + +Advanced Topics +=============== + +.. _install-freethreaded-macos: + +Installing Free-threaded Binaries +--------------------------------- + +.. versionadded:: 3.13 (Experimental) + +.. note:: + + Everything described in this section is considered experimental, + and should be expected to change in future releases. + +The ``python.org`` :ref:`Python for macOS ` +installer package can optionally install an additional build of +Python |usemac_x_dot_y| that supports :pep:`703`, the experimental free-threading feature +(running with the :term:`global interpreter lock` disabled). +Check the release page on ``python.org`` for possible updated information. + +Because this feature is still considered experimental, the support for it +is not installed by default. It is packaged as a separate install option, +available by clicking the **Customize** button on the **Installation Type** +step of the installer as described above. + +.. image:: mac_installer_09_custom_install_free_threaded.png + +If the box next to the **Free-threaded Python** package name is checked, +a separate :file:`PythonT.framework` will also be installed +alongside the normal :file:`Python.framework` in :file:`/Library/Frameworks`. 
+This configuration allows a free-threaded Python |usemac_x_dot_y| build to co-exist +on your system with a traditional (GIL only) Python |usemac_x_dot_y| build with +minimal risk while installing or testing. This installation layout is itself +experimental and is subject to change in future releases. + +Known cautions and limitations: + +- The **UNIX command-line tools** package, which is selected by default, + will install links in :file:`/usr/local/bin` for |usemac_python_x_dot_y_t_literal|, + the free-threaded interpreter, and |usemac_python_x_dot_y_t_literal_config|, + a configuration utility which may be useful for package builders. + Since :file:`/usr/local/bin` is typically included in your shell ``PATH``, + in most cases no changes to your ``PATH`` environment variables should + be needed to use |usemac_python_x_dot_y_t_literal|. + +- For this release, the **Shell profile updater** package and the + :file:`Update Shell Profile.command` in |usemac_applications_folder_version| + do not support the free-threaded package. + +- The free-threaded build and the traditional build have separate search + paths and separate :file:`site-packages` directories so, by default, + if you need a package available in both builds, it may need to be installed in both. + The free-threaded package will install a separate instance of :program:`pip` for use + with |usemac_python_x_dot_y_t_literal|. + + - To install a package using :command:`pip` without a :command:`venv`: + + |usemac_python_x_dot_y_t_literal| ``-m pip install `` + +- When working with multiple Python environments, it is usually safest and easiest + to :ref:`create and use virtual environments `. + This can avoid possible command name conflicts and confusion about which Python is in use: + + |usemac_python_x_dot_y_t_literal| ``-m venv `` + + then :command:`activate`. + +- To run a free-threaded version of IDLE: + + |usemac_python_x_dot_y_t_literal| ``-m idlelib`` + +- The interpreters in both builds respond to the same + :ref:`PYTHON environment variables ` + which may have unexpected results, for example, if you have ``PYTHONPATH`` + set in a shell profile. If necessary, there are + :ref:`command line options ` like ``-E`` + to ignore these environment variables. + +- The free-threaded build links to the third-party shared libraries, + such as ``OpenSSL`` and ``Tk``, installed in the traditional framework. + This means that both builds also share one set of trust certificates + as installed by the :command:`Install Certificates.command` script, + thus it only needs to be run once. + +- If you cannot depend on the link in ``/usr/local/bin`` pointing to the + ``python.org`` free-threaded |usemac_python_x_dot_y_t_literal| (for example, if you want + to install your own version there or some other distribution does), + you can explicitly set your shell ``PATH`` environment variable to + include the ``PythonT`` framework ``bin`` directory: + + .. code-block:: sh + + export PATH="/Library/Frameworks/PythonT.framework/Versions/3.13/bin":"$PATH" + + The traditional framework installation by default does something similar, + except for :file:`Python.framework`. Be aware that having both framework ``bin`` + directories in ``PATH`` can lead to confusion if there are duplicate names + like ``python3.13`` in both; which one is actually used depends on the order + they appear in ``PATH``. The ``which python3.x`` or ``which python3.xt`` + commands can show which path is being used. Using virtual environments + can help avoid such ambiguities. 
Another option might be to create + a shell :command:`alias` to the desired interpreter, like: + + .. code-block:: sh + + alias py3.13="/Library/Frameworks/Python.framework/Versions/3.13/bin/python3.13" + alias py3.13t="/Library/Frameworks/PythonT.framework/Versions/3.13/bin/python3.13t" + +Installing using the command line +--------------------------------- + +If you want to use automation to install the ``python.org`` installer package +(rather than by using the familiar macOS :program:`Installer` GUI app), +the macOS command line :command:`installer` utility lets you select non-default +options, too. If you are not familiar with :command:`installer`, it can be +somewhat cryptic (see :command:`man installer` for more information). +As an example, the following shell snippet shows one way to do it, +using the ``3.13.0b2`` release and selecting the free-threaded interpreter +option: + +.. code-block:: sh + + RELEASE="python-3.13.0b2-macos11.pkg" + + # download installer pkg + curl -O https://www.python.org/ftp/python/3.13.0/${RELEASE} + + # create installer choicechanges to customize the install: + # enable the PythonTFramework-3.13 package + # while accepting the other defaults (install all other packages) + cat > ./choicechanges.plist < + + + + + attributeSetting + 1 + choiceAttribute + selected + choiceIdentifier + org.python.Python.PythonTFramework-3.13 + + + + EOF + + sudo installer -pkg ./${RELEASE} -applyChoiceChangesXML ./choicechanges.plist -target / + + +You can then test that both installer builds are now available with something like: + +.. code-block:: console + + $ # test that the free-threaded interpreter was installed if the Unix Command Tools package was enabled + $ /usr/local/bin/python3.13t -VV + Python 3.13.0b2 experimental free-threading build (v3.13.0b2:3a83b172af, Jun 5 2024, 12:57:31) [Clang 15.0.0 (clang-1500.3.9.4)] + $ # and the traditional interpreter + $ /usr/local/bin/python3.13 -VV + Python 3.13.0b2 (v3.13.0b2:3a83b172af, Jun 5 2024, 12:50:24) [Clang 15.0.0 (clang-1500.3.9.4)] + $ # test that they are also available without the prefix if /usr/local/bin is on $PATH + $ python3.13t -VV + Python 3.13.0b2 experimental free-threading build (v3.13.0b2:3a83b172af, Jun 5 2024, 12:57:31) [Clang 15.0.0 (clang-1500.3.9.4)] + $ python3.13 -VV + Python 3.13.0b2 (v3.13.0b2:3a83b172af, Jun 5 2024, 12:50:24) [Clang 15.0.0 (clang-1500.3.9.4)] + +.. note:: + + Current ``python.org`` installers only install to fixed locations like + :file:`/Library/Frameworks/`, :file:`/Applications`, and :file:`/usr/local/bin`. + You cannot use the :command:`installer` ``-domain`` option to install to + other locations. + .. _distributing-python-applications-on-the-mac: Distributing Python Applications -================================ +-------------------------------- A range of tools exist for converting your Python code into a standalone distributable application: @@ -180,12 +436,12 @@ distributable application: * :pypi:`py2app`: Supports creating macOS ``.app`` bundles from a Python project. -* `Briefcase `__: Part of the `BeeWare Project - `__; a cross-platform packaging tool that supports +* `Briefcase `_: Part of the `BeeWare Project + `_; a cross-platform packaging tool that supports creation of ``.app`` bundles on macOS, as well as managing signing and notarization. -* `PyInstaller `__: A cross-platform packaging tool that creates +* `PyInstaller `_: A cross-platform packaging tool that creates a single file or folder as a distributable artifact. 
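
As an illustration of the first option above, the following is a minimal,
hypothetical :pypi:`py2app` configuration; the ``myapp.py`` script name is a
placeholder, and real projects will usually need additional options:

.. code-block:: python

    # setup.py -- minimal py2app configuration (illustrative only).
    from setuptools import setup

    setup(
        app=["myapp.py"],            # the script to wrap into a .app bundle
        setup_requires=["py2app"],
    )

Building the bundle is then typically a matter of running
``python setup.py py2app``, which places the resulting ``.app`` in a
``dist`` directory.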
App Store Compliance @@ -213,11 +469,6 @@ required if you are using the macOS App Store as a distribution channel. Other Resources =============== -The Pythonmac-SIG mailing list is an excellent support resource for Python users -and developers on the Mac: - -https://www.python.org/community/sigs/current/pythonmac-sig/ - -Another useful resource is the MacPython wiki: - -https://wiki.python.org/moin/MacPython +The `python.org Help page `_ has links to many useful resources. +The `Pythonmac-SIG mailing list `_ +is another support resource specifically for Python users and developers on the Mac. diff --git a/Doc/using/mac_installer_01_introduction.png b/Doc/using/mac_installer_01_introduction.png new file mode 100644 index 00000000000000..1999f3a3759093 Binary files /dev/null and b/Doc/using/mac_installer_01_introduction.png differ diff --git a/Doc/using/mac_installer_02_readme.png b/Doc/using/mac_installer_02_readme.png new file mode 100644 index 00000000000000..a36efaf7d50fd6 Binary files /dev/null and b/Doc/using/mac_installer_02_readme.png differ diff --git a/Doc/using/mac_installer_03_license.png b/Doc/using/mac_installer_03_license.png new file mode 100644 index 00000000000000..598c22a13d9e62 Binary files /dev/null and b/Doc/using/mac_installer_03_license.png differ diff --git a/Doc/using/mac_installer_04_installation_type.png b/Doc/using/mac_installer_04_installation_type.png new file mode 100644 index 00000000000000..9498fd06240a4e Binary files /dev/null and b/Doc/using/mac_installer_04_installation_type.png differ diff --git a/Doc/using/mac_installer_05_custom_install.png b/Doc/using/mac_installer_05_custom_install.png new file mode 100644 index 00000000000000..3a201d2f44655a Binary files /dev/null and b/Doc/using/mac_installer_05_custom_install.png differ diff --git a/Doc/using/mac_installer_06_summary.png b/Doc/using/mac_installer_06_summary.png new file mode 100644 index 00000000000000..1af6eee2c668cd Binary files /dev/null and b/Doc/using/mac_installer_06_summary.png differ diff --git a/Doc/using/mac_installer_07_applications.png b/Doc/using/mac_installer_07_applications.png new file mode 100644 index 00000000000000..940219cad6f61c Binary files /dev/null and b/Doc/using/mac_installer_07_applications.png differ diff --git a/Doc/using/mac_installer_08_install_certificates.png b/Doc/using/mac_installer_08_install_certificates.png new file mode 100644 index 00000000000000..c125eeb18aa0c1 Binary files /dev/null and b/Doc/using/mac_installer_08_install_certificates.png differ diff --git a/Doc/using/mac_installer_09_custom_install_free_threaded.png b/Doc/using/mac_installer_09_custom_install_free_threaded.png new file mode 100644 index 00000000000000..0f69c55eddb228 Binary files /dev/null and b/Doc/using/mac_installer_09_custom_install_free_threaded.png differ diff --git a/Doc/using/venv-create.inc b/Doc/using/venv-create.inc deleted file mode 100644 index 354eb1541ceac2..00000000000000 --- a/Doc/using/venv-create.inc +++ /dev/null @@ -1,121 +0,0 @@ -Creation of :ref:`virtual environments ` is done by executing the -command ``venv``:: - - python -m venv /path/to/new/virtual/environment - -Running this command creates the target directory (creating any parent -directories that don't exist already) and places a ``pyvenv.cfg`` file in it -with a ``home`` key pointing to the Python installation from which the command -was run (a common name for the target directory is ``.venv``). 
It also creates -a ``bin`` (or ``Scripts`` on Windows) subdirectory containing a copy/symlink -of the Python binary/binaries (as appropriate for the platform or arguments -used at environment creation time). It also creates an (initially empty) -``lib/pythonX.Y/site-packages`` subdirectory (on Windows, this is -``Lib\site-packages``). If an existing directory is specified, it will be -re-used. - -.. versionchanged:: 3.5 - The use of ``venv`` is now recommended for creating virtual environments. - -.. deprecated:: 3.6 - ``pyvenv`` was the recommended tool for creating virtual environments for - Python 3.3 and 3.4, and is - :ref:`deprecated in Python 3.6 `. - -.. highlight:: none - -On Windows, invoke the ``venv`` command as follows:: - - c:\>Python35\python -m venv c:\path\to\myenv - -Alternatively, if you configured the ``PATH`` and ``PATHEXT`` variables for -your :ref:`Python installation `:: - - c:\>python -m venv c:\path\to\myenv - -The command, if run with ``-h``, will show the available options:: - - usage: venv [-h] [--system-site-packages] [--symlinks | --copies] [--clear] - [--upgrade] [--without-pip] [--prompt PROMPT] [--upgrade-deps] - [--without-scm-ignore-file] - ENV_DIR [ENV_DIR ...] - - Creates virtual Python environments in one or more target directories. - - positional arguments: - ENV_DIR A directory to create the environment in. - - options: - -h, --help show this help message and exit - --system-site-packages - Give the virtual environment access to the system - site-packages dir. - --symlinks Try to use symlinks rather than copies, when - symlinks are not the default for the platform. - --copies Try to use copies rather than symlinks, even when - symlinks are the default for the platform. - --clear Delete the contents of the environment directory if - it already exists, before environment creation. - --upgrade Upgrade the environment directory to use this - version of Python, assuming Python has been upgraded - in-place. - --without-pip Skips installing or upgrading pip in the virtual - environment (pip is bootstrapped by default) - --prompt PROMPT Provides an alternative prompt prefix for this - environment. - --upgrade-deps Upgrade core dependencies (pip) to the latest - version in PyPI - --without-scm-ignore-file - Skips adding the default SCM ignore file to the - environment directory (the default is a .gitignore - file). - - Once an environment has been created, you may wish to activate it, e.g. by - sourcing an activate script in its bin directory. - -.. versionchanged:: 3.13 - - ``--without-scm-ignore-file`` was added along with creating an ignore file - for ``git`` by default. - -.. versionchanged:: 3.12 - - ``setuptools`` is no longer a core venv dependency. - -.. versionchanged:: 3.9 - Add ``--upgrade-deps`` option to upgrade pip + setuptools to the latest on PyPI - -.. versionchanged:: 3.4 - Installs pip by default, added the ``--without-pip`` and ``--copies`` - options - -.. versionchanged:: 3.4 - In earlier versions, if the target directory already existed, an error was - raised, unless the ``--clear`` or ``--upgrade`` option was provided. - -.. note:: - While symlinks are supported on Windows, they are not recommended. Of - particular note is that double-clicking ``python.exe`` in File Explorer - will resolve the symlink eagerly and ignore the virtual environment. - -.. note:: - On Microsoft Windows, it may be required to enable the ``Activate.ps1`` - script by setting the execution policy for the user. 
You can do this by - issuing the following PowerShell command: - - PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser - - See `About Execution Policies - `_ - for more information. - -The created ``pyvenv.cfg`` file also includes the -``include-system-site-packages`` key, set to ``true`` if ``venv`` is -run with the ``--system-site-packages`` option, ``false`` otherwise. - -Unless the ``--without-pip`` option is given, :mod:`ensurepip` will be -invoked to bootstrap ``pip`` into the virtual environment. - -Multiple paths can be given to ``venv``, in which case an identical virtual -environment will be created, according to the given options, at each provided -path. diff --git a/Doc/whatsnew/2.1.rst b/Doc/whatsnew/2.1.rst index 8eafb48461a67c..f23f27c994d717 100644 --- a/Doc/whatsnew/2.1.rst +++ b/Doc/whatsnew/2.1.rst @@ -443,8 +443,8 @@ Python syntax:: f.grammar = "A ::= B (C D)*" The dictionary containing attributes can be accessed as the function's -:attr:`~object.__dict__`. Unlike the :attr:`~object.__dict__` attribute of class instances, in -functions you can actually assign a new dictionary to :attr:`~object.__dict__`, though +:attr:`~function.__dict__`. Unlike the :attr:`~type.__dict__` attribute of class instances, in +functions you can actually assign a new dictionary to :attr:`~function.__dict__`, though the new value is restricted to a regular Python dictionary; you *can't* be tricky and set it to a :class:`!UserDict` instance, or any other random object that behaves like a mapping. diff --git a/Doc/whatsnew/2.2.rst b/Doc/whatsnew/2.2.rst index 5db34fa08c634a..856be5ecfa56ad 100644 --- a/Doc/whatsnew/2.2.rst +++ b/Doc/whatsnew/2.2.rst @@ -171,7 +171,7 @@ attributes of their own: * :attr:`~definition.__name__` is the attribute's name. -* :attr:`!__doc__` is the attribute's docstring. +* :attr:`~definition.__doc__` is the attribute's docstring. * ``__get__(object)`` is a method that retrieves the attribute value from *object*. @@ -186,7 +186,8 @@ are:: descriptor = obj.__class__.x descriptor.__get__(obj) -For methods, :meth:`!descriptor.__get__` returns a temporary object that's +For methods, :meth:`descriptor.__get__ ` returns a temporary +object that's callable, and wraps up the instance and the method to be called on it. This is also why static methods and class methods are now possible; they have descriptors that wrap up just the method, or the method and the class. As a diff --git a/Doc/whatsnew/2.3.rst b/Doc/whatsnew/2.3.rst index 80849ab9a1a3db..ac463f82cfb8ca 100644 --- a/Doc/whatsnew/2.3.rst +++ b/Doc/whatsnew/2.3.rst @@ -1113,10 +1113,10 @@ Here are all of the changes that Python 2.3 makes to the core Python language. * One of the noted incompatibilities between old- and new-style classes has been - removed: you can now assign to the :attr:`~definition.__name__` and :attr:`~class.__bases__` + removed: you can now assign to the :attr:`~type.__name__` and :attr:`~type.__bases__` attributes of new-style classes. There are some restrictions on what can be - assigned to :attr:`~class.__bases__` along the lines of those relating to assigning to - an instance's :attr:`~instance.__class__` attribute. + assigned to :attr:`!__bases__` along the lines of those relating to assigning to + an instance's :attr:`~object.__class__` attribute. .. ====================================================================== @@ -1925,8 +1925,8 @@ Changes to Python's build process and to the C API include: dependence on a system version or local installation of Expat. 
* If you dynamically allocate type objects in your extension, you should be - aware of a change in the rules relating to the :attr:`!__module__` and - :attr:`~definition.__name__` attributes. In summary, you will want to ensure the type's + aware of a change in the rules relating to the :attr:`~type.__module__` and + :attr:`~type.__name__` attributes. In summary, you will want to ensure the type's dictionary contains a ``'__module__'`` key; making the module name the part of the type name leading up to the final period will no longer have the desired effect. For more detail, read the API reference documentation or the source. diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst index 9dc17494c42966..ec110a3952c07c 100644 --- a/Doc/whatsnew/3.12.rst +++ b/Doc/whatsnew/3.12.rst @@ -1887,7 +1887,7 @@ New Features The :c:macro:`Py_TPFLAGS_MANAGED_DICT` and :c:macro:`Py_TPFLAGS_MANAGED_WEAKREF` flags have been added. This allows extensions classes to support object - ``__dict__`` and weakrefs with less bookkeeping, + :attr:`~object.__dict__` and weakrefs with less bookkeeping, using less memory and with faster access. * API for performing calls using @@ -2006,7 +2006,7 @@ Porting to Python 3.12 internal-only field directly. To get a list of subclasses, call the Python method - :py:meth:`~class.__subclasses__` (using :c:func:`PyObject_CallMethod`, + :py:meth:`~type.__subclasses__` (using :c:func:`PyObject_CallMethod`, for example). * Add support of more formatting options (left aligning, octals, uppercase @@ -2025,7 +2025,7 @@ Porting to Python 3.12 :c:func:`PyUnicode_FromFormatV`. (Contributed by Philip Georgi in :gh:`95504`.) -* Extension classes wanting to add a ``__dict__`` or weak reference slot +* Extension classes wanting to add a :attr:`~object.__dict__` or weak reference slot should use :c:macro:`Py_TPFLAGS_MANAGED_DICT` and :c:macro:`Py_TPFLAGS_MANAGED_WEAKREF` instead of ``tp_dictoffset`` and ``tp_weaklistoffset``, respectively. diff --git a/Doc/whatsnew/3.13.rst b/Doc/whatsnew/3.13.rst index 5640759e79b734..52fe749697cfa4 100644 --- a/Doc/whatsnew/3.13.rst +++ b/Doc/whatsnew/3.13.rst @@ -121,9 +121,10 @@ Interpreter improvements: Python data model improvements: -* :attr:`~class.__static_attributes__` stores the names of attributes accessed +* :attr:`~type.__static_attributes__` stores the names of attributes accessed through ``self.X`` in any function in a class body. -* :attr:`!__firstlineno__` records the first line number of a class definition. +* :attr:`~type.__firstlineno__` records the first line number of a class + definition. Significant improvements in the standard library: @@ -588,7 +589,7 @@ Other Language Changes (Contributed by Levi Sabah, Zackery Spytz and Hugo van Kemenade in :gh:`73965`.) -* Classes have a new :attr:`~class.__static_attributes__` attribute. +* Classes have a new :attr:`~type.__static_attributes__` attribute. This is populated by the compiler with a tuple of the class's attribute names which are assigned through ``self.`` from any function in its body. (Contributed by Irit Katriel in :gh:`115775`.) @@ -823,6 +824,24 @@ copy (Contributed by Serhiy Storchaka in :gh:`108751`.) +ctypes +------ + +* As a consequence of necessary internal refactoring, initialization of + internal metaclasses now happens in ``__init__`` rather + than in ``__new__``. This affects projects that subclass these internal + metaclasses to provide custom initialization. 
+ Generally: + + - Custom logic that was done in ``__new__`` after calling ``super().__new__`` + should be moved to ``__init__``. + - To create a class, call the metaclass, not only the metaclass's + ``__new__`` method. + + See :gh:`124520` for discussion and links to changes in some affected + projects. + + dbm --- @@ -2191,13 +2210,13 @@ New Features * Add the :c:func:`PyType_GetFullyQualifiedName` function to get the type's fully qualified name. - The module name is prepended if ``type.__module__`` is a string - and is not equal to either ``'builtins'`` or ``'__main__'``. + The module name is prepended if :attr:`type.__module__` is + a string and is not equal to either ``'builtins'`` or ``'__main__'``. (Contributed by Victor Stinner in :gh:`111696`.) * Add the :c:func:`PyType_GetModuleName` function - to get the type's module name. - This is equivalent to getting the ``type.__module__`` attribute. + to get the type's module name. This is equivalent to getting the + :attr:`type.__module__` attribute. (Contributed by Eric Snow and Victor Stinner in :gh:`111696`.) * Add the :c:func:`PyUnicode_EqualToUTF8AndSize` diff --git a/Doc/whatsnew/3.14.rst b/Doc/whatsnew/3.14.rst index 5acb9bfe18b2d0..ffc001241ac5ec 100644 --- a/Doc/whatsnew/3.14.rst +++ b/Doc/whatsnew/3.14.rst @@ -91,7 +91,7 @@ annotations. Annotations may be evaluated in the :attr:`~annotationlib.Format.VA format (which evaluates annotations to runtime values, similar to the behavior in earlier Python versions), the :attr:`~annotationlib.Format.FORWARDREF` format (which replaces undefined names with special markers), and the -:attr:`~annotationlib.Format.SOURCE` format (which returns annotations as strings). +:attr:`~annotationlib.Format.STRING` format (which returns annotations as strings). This example shows how these formats behave: @@ -106,7 +106,7 @@ This example shows how these formats behave: NameError: name 'Undefined' is not defined >>> get_annotations(func, format=Format.FORWARDREF) {'arg': ForwardRef('Undefined')} - >>> get_annotations(func, format=Format.SOURCE) + >>> get_annotations(func, format=Format.STRING) {'arg': 'Undefined'} Implications for annotated code @@ -185,7 +185,7 @@ Other Language Changes ``python -O -c 'assert (__debug__ := 1)'`` now produces a :exc:`SyntaxError`. (Contributed by Irit Katriel in :gh:`122245`.) -* Added class methods :meth:`float.from_number` and :meth:`complex.from_number` +* Add class methods :meth:`float.from_number` and :meth:`complex.from_number` to convert a number to :class:`float` or :class:`complex` type correspondingly. They raise an error if the argument is a string. (Contributed by Serhiy Storchaka in :gh:`84978`.) @@ -206,7 +206,7 @@ Improved Modules ast --- -* Added :func:`ast.compare` for comparing two ASTs. +* Add :func:`ast.compare` for comparing two ASTs. (Contributed by Batuhan Taskaya and Jeremy Hylton in :issue:`15987`.) * Add support for :func:`copy.replace` for AST nodes. @@ -215,6 +215,9 @@ ast * Docstrings are now removed from an optimized AST in optimization level 2. (Contributed by Irit Katriel in :gh:`123958`.) +* The ``repr()`` output for AST nodes now includes more information. + (Contributed by Tomas R in :gh:`116022`.) + ctypes ------ @@ -233,9 +236,9 @@ ctypes dis --- -* Added support for rendering full source location information of +* Add support for rendering full source location information of :class:`instructions `, rather than only the line number. 
- This feature is added to the following interfaces via the ``show_positions`` + This feature is added to the following interfaces via the *show_positions* keyword argument: - :class:`dis.Bytecode`, @@ -243,52 +246,66 @@ dis - :func:`dis.disassemble`. This feature is also exposed via :option:`dis --show-positions`. - (Contributed by Bénédikt Tran in :gh:`123165`.) fractions --------- -Added support for converting any objects that have the -:meth:`!as_integer_ratio` method to a :class:`~fractions.Fraction`. -(Contributed by Serhiy Storchaka in :gh:`82017`.) +* Add support for converting any objects that have the + :meth:`!as_integer_ratio` method to a :class:`~fractions.Fraction`. + (Contributed by Serhiy Storchaka in :gh:`82017`.) + + +functools +--------- + +* Add support to :func:`functools.partial` and + :func:`functools.partialmethod` for :data:`functools.Placeholder` sentinels + to reserve a place for positional arguments. + (Contributed by Dominykas Grigonis in :gh:`119127`.) http ---- -Directory lists and error pages generated by the :mod:`http.server` -module allow the browser to apply its default dark mode. -(Contributed by Yorik Hansen in :gh:`123430`.) +* Directory lists and error pages generated by the :mod:`http.server` + module allow the browser to apply its default dark mode. + (Contributed by Yorik Hansen in :gh:`123430`.) json ---- -Add notes for JSON serialization errors that allow to identify the source -of the error. -(Contributed by Serhiy Storchaka in :gh:`122163`.) +* Add notes for JSON serialization errors that allow to identify the source + of the error. + (Contributed by Serhiy Storchaka in :gh:`122163`.) -Enable :mod:`json` module to work as a script using the :option:`-m` switch: ``python -m json``. -See the :ref:`JSON command-line interface ` documentation. -(Contributed by Trey Hunner in :gh:`122873`.) +* Enable the :mod:`json` module to work as a script using the :option:`-m` switch: ``python -m json``. + See the :ref:`JSON command-line interface ` documentation. + (Contributed by Trey Hunner in :gh:`122873`.) operator -------- -* Two new functions ``operator.is_none`` and ``operator.is_not_none`` +* Two new functions :func:`operator.is_none` and :func:`operator.is_not_none` have been added, such that ``operator.is_none(obj)`` is equivalent to ``obj is None`` and ``operator.is_not_none(obj)`` is equivalent to ``obj is not None``. (Contributed by Raymond Hettinger and Nico Mexis in :gh:`115808`.) +datetime +-------- + +* Add :meth:`datetime.time.strptime` and :meth:`datetime.date.strptime`. + (Contributed by Wannes Boeykens in :gh:`41431`.) + os -- -* Added the :data:`os.environ.refresh() ` method to update +* Add the :data:`os.environ.refresh() ` method to update :data:`os.environ` with changes to the environment made by :func:`os.putenv`, by :func:`os.unsetenv`, or made outside Python in the same process. (Contributed by Victor Stinner in :gh:`120057`.) @@ -318,12 +335,15 @@ pdb :pdbcmd:`commands` are preserved across hard-coded breakpoints. (Contributed by Tian Gao in :gh:`121450`.) +* Add a new argument *mode* to :class:`pdb.Pdb`. Disable the ``restart`` + command when :mod:`pdb` is in ``inline`` mode. + (Contributed by Tian Gao in :gh:`123757`.) pickle ------ * Set the default protocol version on the :mod:`pickle` module to 5. - For more details, please see :ref:`pickle protocols `. + For more details, see :ref:`pickle protocols `. * Add notes for pickle serialization errors that allow to identify the source of the error. 
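
A minimal sketch of two of the additions noted above,
:data:`functools.Placeholder` and :meth:`datetime.date.strptime`
(the example values are illustrative only):

.. code-block:: python

    from datetime import date
    from functools import Placeholder, partial

    # Placeholder reserves a positional slot: the base is supplied at call
    # time while the exponent is fixed to 2.
    square = partial(pow, Placeholder, 2)
    print(square(5))                                  # 25

    # date.strptime parses directly to a date, without going through datetime.
    print(date.strptime("2024-10-01", "%Y-%m-%d"))    # 2024-10-01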
@@ -361,12 +381,26 @@ asyncio Deprecated ========== +* :mod:`asyncio`: + :func:`!asyncio.iscoroutinefunction` is deprecated + and will be removed in Python 3.16, + use :func:`inspect.iscoroutinefunction` instead. + (Contributed by Jiahao Li and Kumar Aditya in :gh:`122875`.) + * :mod:`builtins`: Passing a complex number as the *real* or *imag* argument in the :func:`complex` constructor is now deprecated; it should only be passed as a single positional argument. (Contributed by Serhiy Storchaka in :gh:`109218`.) +* :mod:`multiprocessing` and :mod:`concurrent.futures`: + The default start method (see :ref:`multiprocessing-start-methods`) changed + away from *fork* to *forkserver* on platforms where it was not already + *spawn* (Windows & macOS). If you require the threading incompatible *fork* + start method you must explicitly specify it when using :mod:`multiprocessing` + or :mod:`concurrent.futures` APIs. + (Contributed by Gregory P. Smith in :gh:`84559`.) + * :mod:`os`: :term:`Soft deprecate ` :func:`os.popen` and :func:`os.spawn* ` functions. They should no longer be used to @@ -411,7 +445,7 @@ ast user-defined ``visit_Num``, ``visit_Str``, ``visit_Bytes``, ``visit_NameConstant`` and ``visit_Ellipsis`` methods on custom :class:`ast.NodeVisitor` subclasses will no longer be called when the - ``NodeVisitor`` subclass is visiting an AST. Define a ``visit_Constant`` + :class:`!NodeVisitor` subclass is visiting an AST. Define a ``visit_Constant`` method instead. Also, remove the following deprecated properties on :class:`ast.Constant`, @@ -562,18 +596,18 @@ New Features * Add a new :c:type:`PyUnicodeWriter` API to create a Python :class:`str` object: - * :c:func:`PyUnicodeWriter_Create`. - * :c:func:`PyUnicodeWriter_Discard`. - * :c:func:`PyUnicodeWriter_Finish`. - * :c:func:`PyUnicodeWriter_WriteChar`. - * :c:func:`PyUnicodeWriter_WriteUTF8`. - * :c:func:`PyUnicodeWriter_WriteUCS4`. - * :c:func:`PyUnicodeWriter_WriteWideChar`. - * :c:func:`PyUnicodeWriter_WriteStr`. - * :c:func:`PyUnicodeWriter_WriteRepr`. - * :c:func:`PyUnicodeWriter_WriteSubstring`. - * :c:func:`PyUnicodeWriter_Format`. - * :c:func:`PyUnicodeWriter_DecodeUTF8Stateful`. + * :c:func:`PyUnicodeWriter_Create` + * :c:func:`PyUnicodeWriter_Discard` + * :c:func:`PyUnicodeWriter_Finish` + * :c:func:`PyUnicodeWriter_WriteChar` + * :c:func:`PyUnicodeWriter_WriteUTF8` + * :c:func:`PyUnicodeWriter_WriteUCS4` + * :c:func:`PyUnicodeWriter_WriteWideChar` + * :c:func:`PyUnicodeWriter_WriteStr` + * :c:func:`PyUnicodeWriter_WriteRepr` + * :c:func:`PyUnicodeWriter_WriteSubstring` + * :c:func:`PyUnicodeWriter_Format` + * :c:func:`PyUnicodeWriter_DecodeUTF8Stateful` (Contributed by Victor Stinner in :gh:`119182`.) @@ -585,11 +619,11 @@ New Features is backwards incompatible to any C-Extension that holds onto an interned string after a call to :c:func:`Py_Finalize` and is then reused after a call to :c:func:`Py_Initialize`. Any issues arising from this behavior will - normally result in crashes during the exectuion of the subsequent call to + normally result in crashes during the execution of the subsequent call to :c:func:`Py_Initialize` from accessing uninitialized memory. To fix, use an address sanitizer to identify any use-after-free coming from an interned string and deallocate it during module shutdown. - (Contribued by Eddie Elizondo in :gh:`113601`.) + (Contributed by Eddie Elizondo in :gh:`113601`.) 
* Add new functions to convert C ```` numbers from/to Python :class:`int`: @@ -665,12 +699,7 @@ Deprecated :c:macro:`!isfinite` available from :file:`math.h` since C99. (Contributed by Sergey B Kirpichev in :gh:`119613`.) -* :func:`!asyncio.iscoroutinefunction` is deprecated - and will be removed in Python 3.16, - use :func:`inspect.iscoroutinefunction` instead. - (Contributed by Jiahao Li and Kumar Aditya in :gh:`122875`.) - -.. Add deprecations above alphabetically, not here at the end. +.. Add C API deprecations above alphabetically, not here at the end. .. include:: ../deprecations/c-api-pending-removal-in-3.15.rst diff --git a/Doc/whatsnew/3.3.rst b/Doc/whatsnew/3.3.rst index 95b89e7579fcce..f814c4e90d5719 100644 --- a/Doc/whatsnew/3.3.rst +++ b/Doc/whatsnew/3.3.rst @@ -549,9 +549,11 @@ separation of binary and text data). PEP 3155: Qualified name for classes and functions ================================================== -Functions and class objects have a new ``__qualname__`` attribute representing +Functions and class objects have a new :attr:`~definition.__qualname__` +attribute representing the "path" from the module top-level to their definition. For global functions -and classes, this is the same as ``__name__``. For other functions and classes, +and classes, this is the same as :attr:`~definition.__name__`. +For other functions and classes, it provides better information about where they were actually defined, and how they might be accessible from the global scope. diff --git a/Doc/whatsnew/3.5.rst b/Doc/whatsnew/3.5.rst index 077d8c1aae91ae..d4ae6f1f45d346 100644 --- a/Doc/whatsnew/3.5.rst +++ b/Doc/whatsnew/3.5.rst @@ -2526,9 +2526,9 @@ Changes in the C API to format the :func:`repr` of the object. (Contributed by Serhiy Storchaka in :issue:`22453`.) -* Because the lack of the :attr:`__module__` attribute breaks pickling and +* Because the lack of the :attr:`~type.__module__` attribute breaks pickling and introspection, a deprecation warning is now raised for builtin types without - the :attr:`__module__` attribute. This would be an AttributeError in + the :attr:`~type.__module__` attribute. This will be an :exc:`AttributeError` in the future. (Contributed by Serhiy Storchaka in :issue:`20204`.) diff --git a/Doc/whatsnew/3.6.rst b/Doc/whatsnew/3.6.rst index be83aa8a8550c5..2276fed60c8db3 100644 --- a/Doc/whatsnew/3.6.rst +++ b/Doc/whatsnew/3.6.rst @@ -549,7 +549,7 @@ PEP 520: Preserving Class Attribute Definition Order Attributes in a class definition body have a natural ordering: the same order in which the names appear in the source. This order is now -preserved in the new class's :attr:`~object.__dict__` attribute. +preserved in the new class's :attr:`~type.__dict__` attribute. Also, the effective default class *execution* namespace (returned from :ref:`type.__prepare__() `) is now an insertion-order-preserving @@ -934,7 +934,7 @@ asynchronous generators. The :func:`~collections.namedtuple` function now accepts an optional keyword argument *module*, which, when specified, is used for -the ``__module__`` attribute of the returned named tuple class. +the :attr:`~type.__module__` attribute of the returned named tuple class. (Contributed by Raymond Hettinger in :issue:`17941`.) 
The *verbose* and *rename* arguments for diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index 1b24abfa857150..6118b02dd9bd48 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -637,7 +637,8 @@ pydoc ----- The documentation string is now shown not only for class, function, -method etc, but for any object that has its own ``__doc__`` attribute. +method etc, but for any object that has its own :attr:`~definition.__doc__` +attribute. (Contributed by Serhiy Storchaka in :issue:`40257`.) random diff --git a/Include/Python.h b/Include/Python.h index 8fffa22df9da48..e1abdd16f031fb 100644 --- a/Include/Python.h +++ b/Include/Python.h @@ -55,6 +55,10 @@ # include // __readgsqword() #endif +#if defined(Py_GIL_DISABLED) && defined(__MINGW32__) +# include // __readgsqword() +#endif + // Include Python header files #include "pyport.h" #include "pymacro.h" diff --git a/Include/cpython/context.h b/Include/cpython/context.h index a509f4eaba3d77..ec72966e82c6f9 100644 --- a/Include/cpython/context.h +++ b/Include/cpython/context.h @@ -33,10 +33,9 @@ typedef enum { } PyContextEvent; /* - * A Callback to clue in non-python contexts impls about a - * change in the active python context. + * Callback to be invoked when a context object is entered or exited. * - * The callback is invoked with the event and a reference to = + * The callback is invoked with the event and a reference to * the context after its entered and before its exited. * * if the callback returns with an exception set, it must return -1. Otherwise diff --git a/Include/cpython/longobject.h b/Include/cpython/longobject.h index 82f8cc8a159c77..b239f7c557e016 100644 --- a/Include/cpython/longobject.h +++ b/Include/cpython/longobject.h @@ -71,10 +71,9 @@ PyAPI_FUNC(int) _PyLong_Sign(PyObject *v); absolute value of a long. For example, this returns 1 for 1 and -1, 2 for 2 and -2, and 2 for 3 and -3. It returns 0 for 0. v must not be NULL, and must be a normalized long. - (uint64_t)-1 is returned and OverflowError set if the true result doesn't - fit in a size_t. + Always successful. */ -PyAPI_FUNC(uint64_t) _PyLong_NumBits(PyObject *v); +PyAPI_FUNC(int64_t) _PyLong_NumBits(PyObject *v); /* _PyLong_FromByteArray: View the n unsigned bytes as a binary integer in base 256, and return a Python int with the same numeric value. diff --git a/Include/internal/pycore_backoff.h b/Include/internal/pycore_backoff.h index 3db3aa3eb77879..a9d1bce127e63d 100644 --- a/Include/internal/pycore_backoff.h +++ b/Include/internal/pycore_backoff.h @@ -108,7 +108,7 @@ backoff_counter_triggers(_Py_BackoffCounter counter) /* Initial JUMP_BACKWARD counter. * This determines when we create a trace for a loop. * Backoff sequence 16, 32, 64, 128, 256, 512, 1024, 2048, 4096. */ -#define JUMP_BACKWARD_INITIAL_VALUE 16 +#define JUMP_BACKWARD_INITIAL_VALUE 15 #define JUMP_BACKWARD_INITIAL_BACKOFF 4 static inline _Py_BackoffCounter initial_jump_backoff_counter(void) @@ -122,7 +122,7 @@ initial_jump_backoff_counter(void) * otherwise when a side exit warms up we may construct * a new trace before the Tier 1 code has properly re-specialized. * Backoff sequence 64, 128, 256, 512, 1024, 2048, 4096. 
*/ -#define SIDE_EXIT_INITIAL_VALUE 64 +#define SIDE_EXIT_INITIAL_VALUE 63 #define SIDE_EXIT_INITIAL_BACKOFF 6 static inline _Py_BackoffCounter diff --git a/Include/internal/pycore_ceval.h b/Include/internal/pycore_ceval.h index a97b53028c8f59..363845106e40dc 100644 --- a/Include/internal/pycore_ceval.h +++ b/Include/internal/pycore_ceval.h @@ -283,6 +283,7 @@ PyAPI_FUNC(PyObject *) _PyEval_LoadName(PyThreadState *tstate, _PyInterpreterFra #define _PY_GC_SCHEDULED_BIT (1U << 4) #define _PY_EVAL_PLEASE_STOP_BIT (1U << 5) #define _PY_EVAL_EXPLICIT_MERGE_BIT (1U << 6) +#define _PY_EVAL_JIT_INVALIDATE_COLD_BIT (1U << 7) /* Reserve a few bits for future use */ #define _PY_EVAL_EVENTS_BITS 8 diff --git a/Include/internal/pycore_codecs.h b/Include/internal/pycore_codecs.h index 5e2d5c5ce9d868..4400be8b33dee7 100644 --- a/Include/internal/pycore_codecs.h +++ b/Include/internal/pycore_codecs.h @@ -21,6 +21,17 @@ extern void _PyCodec_Fini(PyInterpreterState *interp); extern PyObject* _PyCodec_Lookup(const char *encoding); +/* + * Un-register the error handling callback function registered under + * the given 'name'. Only custom error handlers can be un-registered. + * + * - Return -1 and set an exception if 'name' refers to a built-in + * error handling name (e.g., 'strict'), or if an error occurred. + * - Return 0 if no custom error handler can be found for 'name'. + * - Return 1 if the custom error handler was successfully removed. + */ +extern int _PyCodec_UnregisterError(const char *name); + /* Text codec specific encoding and decoding API. Checks the encoding against a list of codecs which do not diff --git a/Include/internal/pycore_frame.h b/Include/internal/pycore_frame.h index a77658134fae8c..c9ac3819d0390b 100644 --- a/Include/internal/pycore_frame.h +++ b/Include/internal/pycore_frame.h @@ -62,7 +62,7 @@ enum _frameowner { typedef struct _PyInterpreterFrame { _PyStackRef f_executable; /* Deferred or strong reference (code object or None) */ struct _PyInterpreterFrame *previous; - PyObject *f_funcobj; /* Strong reference. Only valid if not on C stack */ + _PyStackRef f_funcobj; /* Deferred or strong reference. Only valid if not on C stack */ PyObject *f_globals; /* Borrowed reference. Only valid if not on C stack */ PyObject *f_builtins; /* Borrowed reference. Only valid if not on C stack */ PyObject *f_locals; /* Strong reference, may be NULL. 
Only valid if not on C stack */ @@ -84,6 +84,12 @@ static inline PyCodeObject *_PyFrame_GetCode(_PyInterpreterFrame *f) { return (PyCodeObject *)executable; } +static inline PyFunctionObject *_PyFrame_GetFunction(_PyInterpreterFrame *f) { + PyObject *func = PyStackRef_AsPyObjectBorrow(f->f_funcobj); + assert(PyFunction_Check(func)); + return (PyFunctionObject *)func; +} + static inline _PyStackRef *_PyFrame_Stackbase(_PyInterpreterFrame *f) { return (f->localsplus + _PyFrame_GetCode(f)->co_nlocalsplus); } @@ -144,14 +150,15 @@ static inline void _PyFrame_Copy(_PyInterpreterFrame *src, _PyInterpreterFrame * */ static inline void _PyFrame_Initialize( - _PyInterpreterFrame *frame, PyFunctionObject *func, + _PyInterpreterFrame *frame, _PyStackRef func, PyObject *locals, PyCodeObject *code, int null_locals_from, _PyInterpreterFrame *previous) { frame->previous = previous; - frame->f_funcobj = (PyObject *)func; + frame->f_funcobj = func; frame->f_executable = PyStackRef_FromPyObjectNew(code); - frame->f_builtins = func->func_builtins; - frame->f_globals = func->func_globals; + PyFunctionObject *func_obj = (PyFunctionObject *)PyStackRef_AsPyObjectBorrow(func); + frame->f_builtins = func_obj->func_builtins; + frame->f_globals = func_obj->func_globals; frame->f_locals = locals; frame->stackpointer = frame->localsplus + code->co_nlocalsplus; frame->frame_obj = NULL; @@ -300,10 +307,11 @@ PyAPI_FUNC(void) _PyThreadState_PopFrame(PyThreadState *tstate, _PyInterpreterFr * Must be guarded by _PyThreadState_HasStackSpace() * Consumes reference to func. */ static inline _PyInterpreterFrame * -_PyFrame_PushUnchecked(PyThreadState *tstate, PyFunctionObject *func, int null_locals_from, _PyInterpreterFrame * previous) +_PyFrame_PushUnchecked(PyThreadState *tstate, _PyStackRef func, int null_locals_from, _PyInterpreterFrame * previous) { CALL_STAT_INC(frames_pushed); - PyCodeObject *code = (PyCodeObject *)func->func_code; + PyFunctionObject *func_obj = (PyFunctionObject *)PyStackRef_AsPyObjectBorrow(func); + PyCodeObject *code = (PyCodeObject *)func_obj->func_code; _PyInterpreterFrame *new_frame = (_PyInterpreterFrame *)tstate->datastack_top; tstate->datastack_top += code->co_framesize; assert(tstate->datastack_top < tstate->datastack_limit); @@ -321,7 +329,7 @@ _PyFrame_PushTrampolineUnchecked(PyThreadState *tstate, PyCodeObject *code, int tstate->datastack_top += code->co_framesize; assert(tstate->datastack_top < tstate->datastack_limit); frame->previous = previous; - frame->f_funcobj = Py_None; + frame->f_funcobj = PyStackRef_None; frame->f_executable = PyStackRef_FromPyObjectNew(code); #ifdef Py_DEBUG frame->f_builtins = NULL; @@ -345,7 +353,7 @@ _PyFrame_PushTrampolineUnchecked(PyThreadState *tstate, PyCodeObject *code, int } PyAPI_FUNC(_PyInterpreterFrame *) -_PyEvalFramePushAndInit(PyThreadState *tstate, PyFunctionObject *func, +_PyEvalFramePushAndInit(PyThreadState *tstate, _PyStackRef func, PyObject *locals, _PyStackRef const* args, size_t argcount, PyObject *kwnames, _PyInterpreterFrame *previous); diff --git a/Include/internal/pycore_gc.h b/Include/internal/pycore_gc.h index cb67a7ee2b3402..cf96f661e6cd7e 100644 --- a/Include/internal/pycore_gc.h +++ b/Include/internal/pycore_gc.h @@ -387,6 +387,17 @@ union _PyStackRef; extern int _PyGC_VisitFrameStack(struct _PyInterpreterFrame *frame, visitproc visit, void *arg); extern int _PyGC_VisitStackRef(union _PyStackRef *ref, visitproc visit, void *arg); +// Like Py_VISIT but for _PyStackRef fields +#define _Py_VISIT_STACKREF(ref) \ + do { \ + if 
(!PyStackRef_IsNull(ref)) { \ + int vret = _PyGC_VisitStackRef(&(ref), visit, arg); \ + if (vret) \ + return vret; \ + } \ + } while (0) + + #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_global_objects_fini_generated.h b/Include/internal/pycore_global_objects_fini_generated.h index 6e948e16b7dbe8..28a76c36801b4b 100644 --- a/Include/internal/pycore_global_objects_fini_generated.h +++ b/Include/internal/pycore_global_objects_fini_generated.h @@ -604,7 +604,6 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) { _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__classdictcell__)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__complex__)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__contains__)); - _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__copy__)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__ctypes_from_outparam__)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__del__)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__delattr__)); @@ -769,7 +768,9 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) { _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_shutdown)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_slotnames)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_strptime)); - _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_strptime_datetime)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_strptime_datetime_date)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_strptime_datetime_datetime)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_strptime_datetime_time)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_type_)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_uninitialized_submodules)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(_warn_unawaited_coroutine)); diff --git a/Include/internal/pycore_global_strings.h b/Include/internal/pycore_global_strings.h index 5c63a6e519b93d..ac789b06fb8a61 100644 --- a/Include/internal/pycore_global_strings.h +++ b/Include/internal/pycore_global_strings.h @@ -93,7 +93,6 @@ struct _Py_global_strings { STRUCT_FOR_ID(__classdictcell__) STRUCT_FOR_ID(__complex__) STRUCT_FOR_ID(__contains__) - STRUCT_FOR_ID(__copy__) STRUCT_FOR_ID(__ctypes_from_outparam__) STRUCT_FOR_ID(__del__) STRUCT_FOR_ID(__delattr__) @@ -258,7 +257,9 @@ struct _Py_global_strings { STRUCT_FOR_ID(_shutdown) STRUCT_FOR_ID(_slotnames) STRUCT_FOR_ID(_strptime) - STRUCT_FOR_ID(_strptime_datetime) + STRUCT_FOR_ID(_strptime_datetime_date) + STRUCT_FOR_ID(_strptime_datetime_datetime) + STRUCT_FOR_ID(_strptime_datetime_time) STRUCT_FOR_ID(_type_) STRUCT_FOR_ID(_uninitialized_submodules) STRUCT_FOR_ID(_warn_unawaited_coroutine) diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 36366429e8db25..a1898d926ac39f 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -261,7 +261,7 @@ struct _is { struct callable_cache callable_cache; _PyOptimizerObject *optimizer; _PyExecutorObject *executor_list_head; - + size_t trace_run_counter; _rare_events rare_events; PyDict_WatchCallback builtins_dict_watcher; diff --git a/Include/internal/pycore_long.h b/Include/internal/pycore_long.h index 8822147b636dd4..196b4152280a35 100644 --- a/Include/internal/pycore_long.h +++ b/Include/internal/pycore_long.h @@ -79,11 +79,10 @@ static inline PyObject* _PyLong_FromUnsignedChar(unsigned char i) } // _PyLong_Frexp returns a double x and an exponent e such that the -// true value is approximately equal to x * 2**e. e is >= 0. 
x is +// true value is approximately equal to x * 2**e. x is // 0.0 if and only if the input is 0 (in which case, e and x are both -// zeroes); otherwise, 0.5 <= abs(x) < 1.0. On overflow, which is -// possible if the number of bits doesn't fit into a Py_ssize_t, sets -// OverflowError and returns -1.0 for x, 0 for e. +// zeroes); otherwise, 0.5 <= abs(x) < 1.0. +// Always successful. // // Export for 'math' shared extension PyAPI_DATA(double) _PyLong_Frexp(PyLongObject *a, int64_t *e); @@ -105,10 +104,10 @@ PyAPI_DATA(PyObject*) _PyLong_DivmodNear(PyObject *, PyObject *); PyAPI_DATA(PyObject*) _PyLong_Format(PyObject *obj, int base); // Export for 'math' shared extension -PyAPI_DATA(PyObject*) _PyLong_Rshift(PyObject *, uint64_t); +PyAPI_DATA(PyObject*) _PyLong_Rshift(PyObject *, int64_t); // Export for 'math' shared extension -PyAPI_DATA(PyObject*) _PyLong_Lshift(PyObject *, uint64_t); +PyAPI_DATA(PyObject*) _PyLong_Lshift(PyObject *, int64_t); PyAPI_FUNC(PyObject*) _PyLong_Add(PyLongObject *left, PyLongObject *right); PyAPI_FUNC(PyObject*) _PyLong_Multiply(PyLongObject *left, PyLongObject *right); diff --git a/Include/internal/pycore_magic_number.h b/Include/internal/pycore_magic_number.h index 095eb0f8a89b79..2414d25d41bfbf 100644 --- a/Include/internal/pycore_magic_number.h +++ b/Include/internal/pycore_magic_number.h @@ -258,6 +258,7 @@ Known values: Python 3.14a1 3604 (Do not duplicate test at end of while statements) Python 3.14a1 3605 (Move ENTER_EXECUTOR to opcode 255) Python 3.14a1 3606 (Specialize CALL_KW) + Python 3.14a1 3607 (Add pseudo instructions JUMP_IF_TRUE/FALSE) Python 3.15 will start with 3650 @@ -270,7 +271,7 @@ PC/launcher.c must also be updated. */ -#define PYC_MAGIC_NUMBER 3606 +#define PYC_MAGIC_NUMBER 3607 /* This is equivalent to converting PYC_MAGIC_NUMBER to 2 bytes (little-endian) and then appending b'\r\n'. 
*/ #define PYC_MAGIC_NUMBER_TOKEN \ diff --git a/Include/internal/pycore_opcode_metadata.h b/Include/internal/pycore_opcode_metadata.h index 51479afae3833d..3344ede5e92c07 100644 --- a/Include/internal/pycore_opcode_metadata.h +++ b/Include/internal/pycore_opcode_metadata.h @@ -22,6 +22,8 @@ extern "C" { ((OP) == STORE_FAST_MAYBE_NULL) || \ ((OP) == JUMP) || \ ((OP) == JUMP_NO_INTERRUPT) || \ + ((OP) == JUMP_IF_FALSE) || \ + ((OP) == JUMP_IF_TRUE) || \ ((OP) == SETUP_FINALLY) || \ ((OP) == SETUP_CLEANUP) || \ ((OP) == SETUP_WITH) || \ @@ -269,6 +271,10 @@ int _PyOpcode_num_popped(int opcode, int oparg) { return 0; case JUMP_FORWARD: return 0; + case JUMP_IF_FALSE: + return 1; + case JUMP_IF_TRUE: + return 1; case JUMP_NO_INTERRUPT: return 0; case LIST_APPEND: @@ -726,6 +732,10 @@ int _PyOpcode_num_pushed(int opcode, int oparg) { return 0; case JUMP_FORWARD: return 0; + case JUMP_IF_FALSE: + return 1; + case JUMP_IF_TRUE: + return 1; case JUMP_NO_INTERRUPT: return 0; case LIST_APPEND: @@ -956,7 +966,7 @@ enum InstructionFormat { }; #define IS_VALID_OPCODE(OP) \ - (((OP) >= 0) && ((OP) < 264) && \ + (((OP) >= 0) && ((OP) < 266) && \ (_PyOpcode_opcode_metadata[(OP)].valid_entry)) #define HAS_ARG_FLAG (1) @@ -1005,9 +1015,9 @@ struct opcode_metadata { int16_t flags; }; -extern const struct opcode_metadata _PyOpcode_opcode_metadata[264]; +extern const struct opcode_metadata _PyOpcode_opcode_metadata[266]; #ifdef NEED_OPCODE_METADATA -const struct opcode_metadata _PyOpcode_opcode_metadata[264] = { +const struct opcode_metadata _PyOpcode_opcode_metadata[266] = { [BINARY_OP] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [BINARY_OP_ADD_FLOAT] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG }, [BINARY_OP_ADD_INT] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, @@ -1224,6 +1234,8 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[264] = { [YIELD_VALUE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, [_DO_CALL_FUNCTION_EX] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [JUMP] = { true, -1, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [JUMP_IF_FALSE] = { true, -1, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [JUMP_IF_TRUE] = { true, -1, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [JUMP_NO_INTERRUPT] = { true, -1, HAS_ARG_FLAG | HAS_JUMP_FLAG }, [LOAD_CLOSURE] = { true, -1, HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_PURE_FLAG }, [POP_BLOCK] = { true, -1, HAS_PURE_FLAG }, @@ -1422,9 +1434,9 @@ _PyOpcode_macro_expansion[256] = { }; #endif // NEED_OPCODE_METADATA -extern const char *_PyOpcode_OpName[264]; +extern const char *_PyOpcode_OpName[266]; #ifdef NEED_OPCODE_METADATA -const char *_PyOpcode_OpName[264] = { +const char *_PyOpcode_OpName[266] = { [BINARY_OP] = "BINARY_OP", [BINARY_OP_ADD_FLOAT] = "BINARY_OP_ADD_FLOAT", [BINARY_OP_ADD_INT] = "BINARY_OP_ADD_INT", @@ -1543,6 +1555,8 @@ const char *_PyOpcode_OpName[264] = { [JUMP_BACKWARD] = "JUMP_BACKWARD", [JUMP_BACKWARD_NO_INTERRUPT] = "JUMP_BACKWARD_NO_INTERRUPT", [JUMP_FORWARD] = "JUMP_FORWARD", + [JUMP_IF_FALSE] = "JUMP_IF_FALSE", + [JUMP_IF_TRUE] = "JUMP_IF_TRUE", [JUMP_NO_INTERRUPT] = "JUMP_NO_INTERRUPT", [LIST_APPEND] = "LIST_APPEND", [LIST_EXTEND] = "LIST_EXTEND", @@ -1943,25 +1957,28 @@ const uint8_t _PyOpcode_Deopt[256] = { case 235: \ ; struct pseudo_targets { - uint8_t targets[3]; + uint8_t as_sequence; + uint8_t targets[4]; }; -extern const struct 
pseudo_targets _PyOpcode_PseudoTargets[8]; +extern const struct pseudo_targets _PyOpcode_PseudoTargets[10]; #ifdef NEED_OPCODE_METADATA -const struct pseudo_targets _PyOpcode_PseudoTargets[8] = { - [LOAD_CLOSURE-256] = { { LOAD_FAST, 0, 0 } }, - [STORE_FAST_MAYBE_NULL-256] = { { STORE_FAST, 0, 0 } }, - [JUMP-256] = { { JUMP_FORWARD, JUMP_BACKWARD, 0 } }, - [JUMP_NO_INTERRUPT-256] = { { JUMP_FORWARD, JUMP_BACKWARD_NO_INTERRUPT, 0 } }, - [SETUP_FINALLY-256] = { { NOP, 0, 0 } }, - [SETUP_CLEANUP-256] = { { NOP, 0, 0 } }, - [SETUP_WITH-256] = { { NOP, 0, 0 } }, - [POP_BLOCK-256] = { { NOP, 0, 0 } }, +const struct pseudo_targets _PyOpcode_PseudoTargets[10] = { + [LOAD_CLOSURE-256] = { 0, { LOAD_FAST, 0, 0, 0 } }, + [STORE_FAST_MAYBE_NULL-256] = { 0, { STORE_FAST, 0, 0, 0 } }, + [JUMP-256] = { 0, { JUMP_FORWARD, JUMP_BACKWARD, 0, 0 } }, + [JUMP_NO_INTERRUPT-256] = { 0, { JUMP_FORWARD, JUMP_BACKWARD_NO_INTERRUPT, 0, 0 } }, + [JUMP_IF_FALSE-256] = { 1, { COPY, TO_BOOL, POP_JUMP_IF_FALSE, 0 } }, + [JUMP_IF_TRUE-256] = { 1, { COPY, TO_BOOL, POP_JUMP_IF_TRUE, 0 } }, + [SETUP_FINALLY-256] = { 0, { NOP, 0, 0, 0 } }, + [SETUP_CLEANUP-256] = { 0, { NOP, 0, 0, 0 } }, + [SETUP_WITH-256] = { 0, { NOP, 0, 0, 0 } }, + [POP_BLOCK-256] = { 0, { NOP, 0, 0, 0 } }, }; #endif // NEED_OPCODE_METADATA static inline bool is_pseudo_target(int pseudo, int target) { - if (pseudo < 256 || pseudo >= 264) { + if (pseudo < 256 || pseudo >= 266) { return false; } for (int i = 0; _PyOpcode_PseudoTargets[pseudo-256].targets[i]; i++) { diff --git a/Include/internal/pycore_optimizer.h b/Include/internal/pycore_optimizer.h index 19e54bf122a8bb..f92c0a0cddf906 100644 --- a/Include/internal/pycore_optimizer.h +++ b/Include/internal/pycore_optimizer.h @@ -29,9 +29,10 @@ typedef struct { typedef struct { uint8_t opcode; uint8_t oparg; - uint16_t valid:1; - uint16_t linked:1; - uint16_t chain_depth:14; // Must be big engough for MAX_CHAIN_DEPTH - 1. + uint8_t valid:1; + uint8_t linked:1; + uint8_t chain_depth:6; // Must be big enough for MAX_CHAIN_DEPTH - 1. + bool warm; int index; // Index of ENTER_EXECUTOR (if code isn't NULL, below). _PyBloomFilter bloom; _PyExecutorLinkListNode links; @@ -123,11 +124,18 @@ PyAPI_FUNC(PyObject *) _PyOptimizer_NewUOpOptimizer(void); #ifdef _Py_TIER2 PyAPI_FUNC(void) _Py_Executors_InvalidateDependency(PyInterpreterState *interp, void *obj, int is_invalidation); PyAPI_FUNC(void) _Py_Executors_InvalidateAll(PyInterpreterState *interp, int is_invalidation); +PyAPI_FUNC(void) _Py_Executors_InvalidateCold(PyInterpreterState *interp); + #else # define _Py_Executors_InvalidateDependency(A, B, C) ((void)0) # define _Py_Executors_InvalidateAll(A, B) ((void)0) +# define _Py_Executors_InvalidateCold(A) ((void)0) + #endif +// Used as the threshold to trigger executor invalidation when +// trace_run_counter is greater than this value. +#define JIT_CLEANUP_THRESHOLD 100000 // This is the length of the trace we project initially. 
#define UOP_MAX_TRACE_LENGTH 800 diff --git a/Include/internal/pycore_runtime_init_generated.h b/Include/internal/pycore_runtime_init_generated.h index bac6b5b8fcfd9d..7847a5c63ebf3f 100644 --- a/Include/internal/pycore_runtime_init_generated.h +++ b/Include/internal/pycore_runtime_init_generated.h @@ -602,7 +602,6 @@ extern "C" { INIT_ID(__classdictcell__), \ INIT_ID(__complex__), \ INIT_ID(__contains__), \ - INIT_ID(__copy__), \ INIT_ID(__ctypes_from_outparam__), \ INIT_ID(__del__), \ INIT_ID(__delattr__), \ @@ -767,7 +766,9 @@ extern "C" { INIT_ID(_shutdown), \ INIT_ID(_slotnames), \ INIT_ID(_strptime), \ - INIT_ID(_strptime_datetime), \ + INIT_ID(_strptime_datetime_date), \ + INIT_ID(_strptime_datetime_datetime), \ + INIT_ID(_strptime_datetime_time), \ INIT_ID(_type_), \ INIT_ID(_uninitialized_submodules), \ INIT_ID(_warn_unawaited_coroutine), \ diff --git a/Include/internal/pycore_typeobject.h b/Include/internal/pycore_typeobject.h index ca5a1e2adb4787..118bc98b35d5e3 100644 --- a/Include/internal/pycore_typeobject.h +++ b/Include/internal/pycore_typeobject.h @@ -209,7 +209,6 @@ extern PyObject * _PyType_GetBases(PyTypeObject *type); extern PyObject * _PyType_GetMRO(PyTypeObject *type); extern PyObject* _PyType_GetSubclasses(PyTypeObject *); extern int _PyType_HasSubclasses(PyTypeObject *); -PyAPI_FUNC(PyObject *) _PyType_GetModuleByDef2(PyTypeObject *, PyTypeObject *, PyModuleDef *); // Export for _testinternalcapi extension. PyAPI_FUNC(PyObject *) _PyType_GetSlotWrapperNames(void); diff --git a/Include/internal/pycore_unicodeobject_generated.h b/Include/internal/pycore_unicodeobject_generated.h index efdbde4c8ea3c6..a688f70a2ba36f 100644 --- a/Include/internal/pycore_unicodeobject_generated.h +++ b/Include/internal/pycore_unicodeobject_generated.h @@ -172,10 +172,6 @@ _PyUnicode_InitStaticStrings(PyInterpreterState *interp) { _PyUnicode_InternStatic(interp, &string); assert(_PyUnicode_CheckConsistency(string, 1)); assert(PyUnicode_GET_LENGTH(string) != 1); - string = &_Py_ID(__copy__); - _PyUnicode_InternStatic(interp, &string); - assert(_PyUnicode_CheckConsistency(string, 1)); - assert(PyUnicode_GET_LENGTH(string) != 1); string = &_Py_ID(__ctypes_from_outparam__); _PyUnicode_InternStatic(interp, &string); assert(_PyUnicode_CheckConsistency(string, 1)); @@ -832,7 +828,15 @@ _PyUnicode_InitStaticStrings(PyInterpreterState *interp) { _PyUnicode_InternStatic(interp, &string); assert(_PyUnicode_CheckConsistency(string, 1)); assert(PyUnicode_GET_LENGTH(string) != 1); - string = &_Py_ID(_strptime_datetime); + string = &_Py_ID(_strptime_datetime_date); + _PyUnicode_InternStatic(interp, &string); + assert(_PyUnicode_CheckConsistency(string, 1)); + assert(PyUnicode_GET_LENGTH(string) != 1); + string = &_Py_ID(_strptime_datetime_datetime); + _PyUnicode_InternStatic(interp, &string); + assert(_PyUnicode_CheckConsistency(string, 1)); + assert(PyUnicode_GET_LENGTH(string) != 1); + string = &_Py_ID(_strptime_datetime_time); _PyUnicode_InternStatic(interp, &string); assert(_PyUnicode_CheckConsistency(string, 1)); assert(PyUnicode_GET_LENGTH(string) != 1); diff --git a/Include/internal/pycore_uop_ids.h b/Include/internal/pycore_uop_ids.h index b950f760d74ac7..927dae88c1fa73 100644 --- a/Include/internal/pycore_uop_ids.h +++ b/Include/internal/pycore_uop_ids.h @@ -222,64 +222,65 @@ extern "C" { #define _LOAD_SUPER_ATTR_METHOD LOAD_SUPER_ATTR_METHOD #define _MAKE_CELL MAKE_CELL #define _MAKE_FUNCTION MAKE_FUNCTION +#define _MAKE_WARM 439 #define _MAP_ADD MAP_ADD #define _MATCH_CLASS MATCH_CLASS 
#define _MATCH_KEYS MATCH_KEYS #define _MATCH_MAPPING MATCH_MAPPING #define _MATCH_SEQUENCE MATCH_SEQUENCE -#define _MAYBE_EXPAND_METHOD 439 -#define _MONITOR_CALL 440 -#define _MONITOR_JUMP_BACKWARD 441 -#define _MONITOR_RESUME 442 +#define _MAYBE_EXPAND_METHOD 440 +#define _MONITOR_CALL 441 +#define _MONITOR_JUMP_BACKWARD 442 +#define _MONITOR_RESUME 443 #define _NOP NOP #define _POP_EXCEPT POP_EXCEPT -#define _POP_JUMP_IF_FALSE 443 -#define _POP_JUMP_IF_TRUE 444 +#define _POP_JUMP_IF_FALSE 444 +#define _POP_JUMP_IF_TRUE 445 #define _POP_TOP POP_TOP -#define _POP_TOP_LOAD_CONST_INLINE_BORROW 445 +#define _POP_TOP_LOAD_CONST_INLINE_BORROW 446 #define _PUSH_EXC_INFO PUSH_EXC_INFO -#define _PUSH_FRAME 446 +#define _PUSH_FRAME 447 #define _PUSH_NULL PUSH_NULL -#define _PY_FRAME_GENERAL 447 -#define _PY_FRAME_KW 448 -#define _QUICKEN_RESUME 449 -#define _REPLACE_WITH_TRUE 450 +#define _PY_FRAME_GENERAL 448 +#define _PY_FRAME_KW 449 +#define _QUICKEN_RESUME 450 +#define _REPLACE_WITH_TRUE 451 #define _RESUME_CHECK RESUME_CHECK #define _RETURN_GENERATOR RETURN_GENERATOR #define _RETURN_VALUE RETURN_VALUE -#define _SAVE_RETURN_OFFSET 451 -#define _SEND 452 -#define _SEND_GEN_FRAME 453 +#define _SAVE_RETURN_OFFSET 452 +#define _SEND 453 +#define _SEND_GEN_FRAME 454 #define _SETUP_ANNOTATIONS SETUP_ANNOTATIONS #define _SET_ADD SET_ADD #define _SET_FUNCTION_ATTRIBUTE SET_FUNCTION_ATTRIBUTE #define _SET_UPDATE SET_UPDATE -#define _START_EXECUTOR 454 -#define _STORE_ATTR 455 -#define _STORE_ATTR_INSTANCE_VALUE 456 -#define _STORE_ATTR_SLOT 457 -#define _STORE_ATTR_WITH_HINT 458 +#define _START_EXECUTOR 455 +#define _STORE_ATTR 456 +#define _STORE_ATTR_INSTANCE_VALUE 457 +#define _STORE_ATTR_SLOT 458 +#define _STORE_ATTR_WITH_HINT 459 #define _STORE_DEREF STORE_DEREF -#define _STORE_FAST 459 -#define _STORE_FAST_0 460 -#define _STORE_FAST_1 461 -#define _STORE_FAST_2 462 -#define _STORE_FAST_3 463 -#define _STORE_FAST_4 464 -#define _STORE_FAST_5 465 -#define _STORE_FAST_6 466 -#define _STORE_FAST_7 467 +#define _STORE_FAST 460 +#define _STORE_FAST_0 461 +#define _STORE_FAST_1 462 +#define _STORE_FAST_2 463 +#define _STORE_FAST_3 464 +#define _STORE_FAST_4 465 +#define _STORE_FAST_5 466 +#define _STORE_FAST_6 467 +#define _STORE_FAST_7 468 #define _STORE_FAST_LOAD_FAST STORE_FAST_LOAD_FAST #define _STORE_FAST_STORE_FAST STORE_FAST_STORE_FAST #define _STORE_GLOBAL STORE_GLOBAL #define _STORE_NAME STORE_NAME -#define _STORE_SLICE 468 -#define _STORE_SUBSCR 469 +#define _STORE_SLICE 469 +#define _STORE_SUBSCR 470 #define _STORE_SUBSCR_DICT STORE_SUBSCR_DICT #define _STORE_SUBSCR_LIST_INT STORE_SUBSCR_LIST_INT #define _SWAP SWAP -#define _TIER2_RESUME_CHECK 470 -#define _TO_BOOL 471 +#define _TIER2_RESUME_CHECK 471 +#define _TO_BOOL 472 #define _TO_BOOL_BOOL TO_BOOL_BOOL #define _TO_BOOL_INT TO_BOOL_INT #define _TO_BOOL_LIST TO_BOOL_LIST @@ -289,14 +290,14 @@ extern "C" { #define _UNARY_NEGATIVE UNARY_NEGATIVE #define _UNARY_NOT UNARY_NOT #define _UNPACK_EX UNPACK_EX -#define _UNPACK_SEQUENCE 472 +#define _UNPACK_SEQUENCE 473 #define _UNPACK_SEQUENCE_LIST UNPACK_SEQUENCE_LIST #define _UNPACK_SEQUENCE_TUPLE UNPACK_SEQUENCE_TUPLE #define _UNPACK_SEQUENCE_TWO_TUPLE UNPACK_SEQUENCE_TWO_TUPLE #define _WITH_EXCEPT_START WITH_EXCEPT_START #define _YIELD_VALUE YIELD_VALUE #define __DO_CALL_FUNCTION_EX _DO_CALL_FUNCTION_EX -#define MAX_UOP_ID 472 +#define MAX_UOP_ID 473 #ifdef __cplusplus } diff --git a/Include/internal/pycore_uop_metadata.h b/Include/internal/pycore_uop_metadata.h index 
4d0ab22e6aa8f3..07606135d7a356 100644 --- a/Include/internal/pycore_uop_metadata.h +++ b/Include/internal/pycore_uop_metadata.h @@ -274,6 +274,7 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_INTERNAL_INCREMENT_OPT_COUNTER] = 0, [_DYNAMIC_EXIT] = HAS_ESCAPES_FLAG, [_START_EXECUTOR] = 0, + [_MAKE_WARM] = 0, [_FATAL_ERROR] = 0, [_CHECK_VALIDITY_AND_SET_IP] = HAS_DEOPT_FLAG, [_DEOPT] = 0, @@ -481,6 +482,7 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = { [_LOAD_SUPER_ATTR_METHOD] = "_LOAD_SUPER_ATTR_METHOD", [_MAKE_CELL] = "_MAKE_CELL", [_MAKE_FUNCTION] = "_MAKE_FUNCTION", + [_MAKE_WARM] = "_MAKE_WARM", [_MAP_ADD] = "_MAP_ADD", [_MATCH_CLASS] = "_MATCH_CLASS", [_MATCH_KEYS] = "_MATCH_KEYS", @@ -1062,6 +1064,8 @@ int _PyUop_num_popped(int opcode, int oparg) return 0; case _START_EXECUTOR: return 0; + case _MAKE_WARM: + return 0; case _FATAL_ERROR: return 0; case _CHECK_VALIDITY_AND_SET_IP: diff --git a/Include/object.h b/Include/object.h index 7124f58f6bdb37..418f2196062df7 100644 --- a/Include/object.h +++ b/Include/object.h @@ -180,6 +180,12 @@ _Py_ThreadId(void) tid = __readfsdword(24); #elif defined(_MSC_VER) && defined(_M_ARM64) tid = __getReg(18); +#elif defined(__MINGW32__) && defined(_M_X64) + tid = __readgsqword(48); +#elif defined(__MINGW32__) && defined(_M_IX86) + tid = __readfsdword(24); +#elif defined(__MINGW32__) && defined(_M_ARM64) + tid = __getReg(18); #elif defined(__i386__) __asm__("movl %%gs:0, %0" : "=r" (tid)); // 32-bit always uses GS #elif defined(__MACH__) && defined(__x86_64__) diff --git a/Include/opcode_ids.h b/Include/opcode_ids.h index 5ded0b41b4830e..8ba1ab25a77770 100644 --- a/Include/opcode_ids.h +++ b/Include/opcode_ids.h @@ -226,13 +226,15 @@ extern "C" { #define INSTRUMENTED_LINE 254 #define ENTER_EXECUTOR 255 #define JUMP 256 -#define JUMP_NO_INTERRUPT 257 -#define LOAD_CLOSURE 258 -#define POP_BLOCK 259 -#define SETUP_CLEANUP 260 -#define SETUP_FINALLY 261 -#define SETUP_WITH 262 -#define STORE_FAST_MAYBE_NULL 263 +#define JUMP_IF_FALSE 257 +#define JUMP_IF_TRUE 258 +#define JUMP_NO_INTERRUPT 259 +#define LOAD_CLOSURE 260 +#define POP_BLOCK 261 +#define SETUP_CLEANUP 262 +#define SETUP_FINALLY 263 +#define SETUP_WITH 264 +#define STORE_FAST_MAYBE_NULL 265 #define HAVE_ARGUMENT 41 #define MIN_SPECIALIZED_OPCODE 150 diff --git a/Include/py_curses.h b/Include/py_curses.h index 79b1b01fcfa594..e11bfedb17d205 100644 --- a/Include/py_curses.h +++ b/Include/py_curses.h @@ -81,8 +81,6 @@ typedef struct { char *encoding; } PyCursesWindowObject; -#define PyCursesWindow_Check(v) Py_IS_TYPE((v), &PyCursesWindow_Type) - #define PyCurses_CAPSULE_NAME "_curses._C_API" @@ -99,6 +97,8 @@ static void **PyCurses_API; #define PyCursesInitialised {if (! ((int (*)(void))PyCurses_API[2]) () ) return NULL;} #define PyCursesInitialisedColor {if (! ((int (*)(void))PyCurses_API[3]) () ) return NULL;} +#define PyCursesWindow_Check(v) Py_IS_TYPE((v), &PyCursesWindow_Type) + #define import_curses() \ PyCurses_API = (void **)PyCapsule_Import(PyCurses_CAPSULE_NAME, 1); diff --git a/InternalDocs/string_interning.md b/InternalDocs/string_interning.md index 358e2c070cd5fa..e0d20632516142 100644 --- a/InternalDocs/string_interning.md +++ b/InternalDocs/string_interning.md @@ -72,7 +72,7 @@ We currently also immortalize strings contained in code objects and similar, specifically in the compiler and in `marshal`. 
These are “close enough” to immortal: even in use cases like hot reloading or `eval`-ing user input, the number of distinct identifiers and string -constants expected to stay low. +constants is expected to stay low. ## Internal API diff --git a/Lib/_collections_abc.py b/Lib/_collections_abc.py index 75252b3a87f9c4..c2edf6c8856c21 100644 --- a/Lib/_collections_abc.py +++ b/Lib/_collections_abc.py @@ -485,9 +485,10 @@ def __new__(cls, origin, args): def __repr__(self): if len(self.__args__) == 2 and _is_param_expr(self.__args__[0]): return super().__repr__() + from annotationlib import value_to_string return (f'collections.abc.Callable' - f'[[{", ".join([_type_repr(a) for a in self.__args__[:-1]])}], ' - f'{_type_repr(self.__args__[-1])}]') + f'[[{", ".join([value_to_string(a) for a in self.__args__[:-1]])}], ' + f'{value_to_string(self.__args__[-1])}]') def __reduce__(self): args = self.__args__ @@ -524,23 +525,6 @@ def _is_param_expr(obj): names = ('ParamSpec', '_ConcatenateGenericAlias') return obj.__module__ == 'typing' and any(obj.__name__ == name for name in names) -def _type_repr(obj): - """Return the repr() of an object, special-casing types (internal helper). - - Copied from :mod:`typing` since collections.abc - shouldn't depend on that module. - (Keep this roughly in sync with the typing version.) - """ - if isinstance(obj, type): - if obj.__module__ == 'builtins': - return obj.__qualname__ - return f'{obj.__module__}.{obj.__qualname__}' - if obj is Ellipsis: - return '...' - if isinstance(obj, FunctionType): - return obj.__name__ - return repr(obj) - class Callable(metaclass=ABCMeta): diff --git a/Lib/_opcode_metadata.py b/Lib/_opcode_metadata.py index 6e4b33921863cb..dd70c5250c0b1e 100644 --- a/Lib/_opcode_metadata.py +++ b/Lib/_opcode_metadata.py @@ -335,13 +335,15 @@ 'INSTRUMENTED_CALL': 252, 'INSTRUMENTED_JUMP_BACKWARD': 253, 'JUMP': 256, - 'JUMP_NO_INTERRUPT': 257, - 'LOAD_CLOSURE': 258, - 'POP_BLOCK': 259, - 'SETUP_CLEANUP': 260, - 'SETUP_FINALLY': 261, - 'SETUP_WITH': 262, - 'STORE_FAST_MAYBE_NULL': 263, + 'JUMP_IF_FALSE': 257, + 'JUMP_IF_TRUE': 258, + 'JUMP_NO_INTERRUPT': 259, + 'LOAD_CLOSURE': 260, + 'POP_BLOCK': 261, + 'SETUP_CLEANUP': 262, + 'SETUP_FINALLY': 263, + 'SETUP_WITH': 264, + 'STORE_FAST_MAYBE_NULL': 265, } HAVE_ARGUMENT = 41 diff --git a/Lib/_pydatetime.py b/Lib/_pydatetime.py index f8e121eb79a04d..78e03e32896740 100644 --- a/Lib/_pydatetime.py +++ b/Lib/_pydatetime.py @@ -463,6 +463,17 @@ def _parse_isoformat_time(tstr): time_comps = _parse_hh_mm_ss_ff(timestr) + hour, minute, second, microsecond = time_comps + became_next_day = False + error_from_components = False + if (hour == 24): + if all(time_comp == 0 for time_comp in time_comps[1:]): + hour = 0 + time_comps[0] = hour + became_next_day = True + else: + error_from_components = True + tzi = None if tz_pos == len_str and tstr[-1] == 'Z': tzi = timezone.utc @@ -495,7 +506,7 @@ def _parse_isoformat_time(tstr): time_comps.append(tzi) - return time_comps + return time_comps, became_next_day, error_from_components # tuple[int, int, int] -> tuple[int, int, int] version of date.fromisocalendar def _isoweek_to_gregorian(year, week, day): @@ -940,6 +951,7 @@ class date: fromtimestamp() today() fromordinal() + strptime() Operators: @@ -1040,6 +1052,12 @@ def fromisocalendar(cls, year, week, day): This is the inverse of the date.isocalendar() function""" return cls(*_isoweek_to_gregorian(year, week, day)) + @classmethod + def strptime(cls, date_string, format): + """Parse a date string according to the given format 
(like time.strptime()).""" + import _strptime + return _strptime._strptime_datetime_date(cls, date_string, format) + # Conversions to string def __repr__(self): @@ -1360,6 +1378,7 @@ class time: Constructors: __new__() + strptime() Operators: @@ -1418,6 +1437,12 @@ def __new__(cls, hour=0, minute=0, second=0, microsecond=0, tzinfo=None, *, fold self._fold = fold return self + @classmethod + def strptime(cls, date_string, format): + """string, format -> new time parsed from a string (like time.strptime()).""" + import _strptime + return _strptime._strptime_datetime_time(cls, date_string, format) + # Read-only field accessors @property def hour(self): @@ -1588,7 +1613,7 @@ def fromisoformat(cls, time_string): time_string = time_string.removeprefix('T') try: - return cls(*_parse_isoformat_time(time_string)) + return cls(*_parse_isoformat_time(time_string)[0]) except Exception: raise ValueError(f'Invalid isoformat string: {time_string!r}') @@ -1902,10 +1927,27 @@ def fromisoformat(cls, date_string): if tstr: try: - time_components = _parse_isoformat_time(tstr) + time_components, became_next_day, error_from_components = _parse_isoformat_time(tstr) except ValueError: raise ValueError( f'Invalid isoformat string: {date_string!r}') from None + else: + if error_from_components: + raise ValueError("minute, second, and microsecond must be 0 when hour is 24") + + if became_next_day: + year, month, day = date_components + # Only wrap day/month when it was previously valid + if month <= 12 and day <= (days_in_month := _days_in_month(year, month)): + # Calculate midnight of the next day + day += 1 + if day > days_in_month: + day = 1 + month += 1 + if month > 12: + month = 1 + year += 1 + date_components = [year, month, day] else: time_components = [0, 0, 0, 0, None] @@ -2124,7 +2166,7 @@ def __str__(self): def strptime(cls, date_string, format): 'string, format -> new datetime parsed from a string (like time.strptime()).' 
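# Illustrative sketch, assuming an interpreter built with the changes above:
# the date.strptime() and time.strptime() constructors route through the new
# _strptime_datetime_date/_time helpers, and fromisoformat() now accepts
# hour 24 as midnight of the following day.
from datetime import date, time, datetime

date.strptime("25 Sep 2024", "%d %b %Y")        # date(2024, 9, 25)
time.strptime("14:30:05.250", "%H:%M:%S.%f")    # time(14, 30, 5, 250000)
datetime.fromisoformat("2024-12-31T24:00:00")   # datetime(2025, 1, 1, 0, 0)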
import _strptime - return _strptime._strptime_datetime(cls, date_string, format) + return _strptime._strptime_datetime_datetime(cls, date_string, format) def utcoffset(self): """Return the timezone offset as timedelta positive east of UTC (negative west of diff --git a/Lib/_pyrepl/simple_interact.py b/Lib/_pyrepl/simple_interact.py index 3c79cf61d04051..342a4b58bfd0f3 100644 --- a/Lib/_pyrepl/simple_interact.py +++ b/Lib/_pyrepl/simple_interact.py @@ -28,6 +28,7 @@ import _sitebuiltins import linecache import functools +import os import sys import code @@ -50,7 +51,9 @@ def check() -> str: try: _get_reader() except _error as e: - return str(e) or repr(e) or "unknown error" + if term := os.environ.get("TERM", ""): + term = f"; TERM={term}" + return str(str(e) or repr(e) or "unknown error") + term return "" @@ -159,10 +162,8 @@ def maybe_run_command(statement: str) -> bool: input_n += 1 except KeyboardInterrupt: r = _get_reader() - if r.last_command and 'isearch' in r.last_command.__name__: - r.isearch_direction = '' - r.console.forgetinput() - r.pop_input_trans() + if r.input_trans is r.isearch_trans: + r.do_cmd(("isearch-end", [""])) r.pos = len(r.get_unicode()) r.dirty = True r.refresh() diff --git a/Lib/_pyrepl/windows_console.py b/Lib/_pyrepl/windows_console.py index f7a0095d795ac6..d457d2b5a338eb 100644 --- a/Lib/_pyrepl/windows_console.py +++ b/Lib/_pyrepl/windows_console.py @@ -371,15 +371,19 @@ def _getscrollbacksize(self) -> int: return info.srWindow.Bottom # type: ignore[no-any-return] - def _read_input(self) -> INPUT_RECORD | None: + def _read_input(self, block: bool = True) -> INPUT_RECORD | None: + if not block: + events = DWORD() + if not GetNumberOfConsoleInputEvents(InHandle, events): + raise WinError(GetLastError()) + if not events.value: + return None + rec = INPUT_RECORD() read = DWORD() if not ReadConsoleInput(InHandle, rec, 1, read): raise WinError(GetLastError()) - if read.value == 0: - return None - return rec def get_event(self, block: bool = True) -> Event | None: @@ -390,10 +394,8 @@ def get_event(self, block: bool = True) -> Event | None: return self.event_queue.pop() while True: - rec = self._read_input() + rec = self._read_input(block) if rec is None: - if block: - continue return None if rec.EventType == WINDOW_BUFFER_SIZE_EVENT: @@ -464,8 +466,8 @@ def flushoutput(self) -> None: def forgetinput(self) -> None: """Forget all pending, but not yet processed input.""" - while self._read_input() is not None: - pass + if not FlushConsoleInputBuffer(InHandle): + raise WinError(GetLastError()) def getpending(self) -> Event: """Return the characters that have been typed but not yet @@ -590,6 +592,14 @@ class INPUT_RECORD(Structure): ReadConsoleInput.argtypes = [HANDLE, POINTER(INPUT_RECORD), DWORD, POINTER(DWORD)] ReadConsoleInput.restype = BOOL + GetNumberOfConsoleInputEvents = _KERNEL32.GetNumberOfConsoleInputEvents + GetNumberOfConsoleInputEvents.argtypes = [HANDLE, POINTER(DWORD)] + GetNumberOfConsoleInputEvents.restype = BOOL + + FlushConsoleInputBuffer = _KERNEL32.FlushConsoleInputBuffer + FlushConsoleInputBuffer.argtypes = [HANDLE] + FlushConsoleInputBuffer.restype = BOOL + OutHandle = GetStdHandle(STD_OUTPUT_HANDLE) InHandle = GetStdHandle(STD_INPUT_HANDLE) else: @@ -602,5 +612,7 @@ def _win_only(*args, **kwargs): ScrollConsoleScreenBuffer = _win_only SetConsoleMode = _win_only ReadConsoleInput = _win_only + GetNumberOfConsoleInputEvents = _win_only + FlushConsoleInputBuffer = _win_only OutHandle = 0 InHandle = 0 diff --git a/Lib/_strptime.py b/Lib/_strptime.py 
index 3f868bcab42446..a3f8bb544d518d 100644 --- a/Lib/_strptime.py +++ b/Lib/_strptime.py @@ -567,18 +567,40 @@ def _strptime_time(data_string, format="%a %b %d %H:%M:%S %Y"): tt = _strptime(data_string, format)[0] return time.struct_time(tt[:time._STRUCT_TM_ITEMS]) -def _strptime_datetime(cls, data_string, format="%a %b %d %H:%M:%S %Y"): - """Return a class cls instance based on the input string and the +def _strptime_datetime_date(cls, data_string, format="%a %b %d %Y"): + """Return a date instance based on the input string and the + format string.""" + tt, _, _ = _strptime(data_string, format) + args = tt[:3] + return cls(*args) + +def _parse_tz(tzname, gmtoff, gmtoff_fraction): + tzdelta = datetime_timedelta(seconds=gmtoff, microseconds=gmtoff_fraction) + if tzname: + return datetime_timezone(tzdelta, tzname) + else: + return datetime_timezone(tzdelta) + +def _strptime_datetime_time(cls, data_string, format="%H:%M:%S"): + """Return a time instance based on the input string and the format string.""" tt, fraction, gmtoff_fraction = _strptime(data_string, format) tzname, gmtoff = tt[-2:] - args = tt[:6] + (fraction,) - if gmtoff is not None: - tzdelta = datetime_timedelta(seconds=gmtoff, microseconds=gmtoff_fraction) - if tzname: - tz = datetime_timezone(tzdelta, tzname) - else: - tz = datetime_timezone(tzdelta) - args += (tz,) + args = tt[3:6] + (fraction,) + if gmtoff is None: + return cls(*args) + else: + tz = _parse_tz(tzname, gmtoff, gmtoff_fraction) + return cls(*args, tz) - return cls(*args) +def _strptime_datetime_datetime(cls, data_string, format="%a %b %d %H:%M:%S %Y"): + """Return a datetime instance based on the input string and the + format string.""" + tt, fraction, gmtoff_fraction = _strptime(data_string, format) + tzname, gmtoff = tt[-2:] + args = tt[:6] + (fraction,) + if gmtoff is None: + return cls(*args) + else: + tz = _parse_tz(tzname, gmtoff, gmtoff_fraction) + return cls(*args, tz) diff --git a/Lib/annotationlib.py b/Lib/annotationlib.py index 0a67742a2b3081..a11188722487b2 100644 --- a/Lib/annotationlib.py +++ b/Lib/annotationlib.py @@ -1,8 +1,10 @@ """Helpers for introspecting and wrapping annotations.""" import ast +import builtins import enum import functools +import keyword import sys import types @@ -13,13 +15,15 @@ "call_evaluate_function", "get_annotate_function", "get_annotations", + "annotations_to_string", + "value_to_string", ] class Format(enum.IntEnum): VALUE = 1 FORWARDREF = 2 - SOURCE = 3 + STRING = 3 _Union = None @@ -154,8 +158,19 @@ def evaluate(self, *, globals=None, locals=None, type_params=None, owner=None): globals[param_name] = param locals.pop(param_name, None) - code = self.__forward_code__ - value = eval(code, globals=globals, locals=locals) + arg = self.__forward_arg__ + if arg.isidentifier() and not keyword.iskeyword(arg): + if arg in locals: + value = locals[arg] + elif arg in globals: + value = globals[arg] + elif hasattr(builtins, arg): + return getattr(builtins, arg) + else: + raise NameError(arg) + else: + code = self.__forward_code__ + value = eval(code, globals=globals, locals=locals) self.__forward_evaluated__ = True self.__forward_value__ = value return value @@ -254,7 +269,9 @@ class _Stringifier: __slots__ = _SLOTS def __init__(self, node, globals=None, owner=None, is_class=False, cell=None): - assert isinstance(node, ast.AST) + # Either an AST node or a simple str (for the common case where a ForwardRef + # represent a single name). 
+ assert isinstance(node, (ast.AST, str)) self.__arg__ = None self.__forward_evaluated__ = False self.__forward_value__ = None @@ -267,18 +284,38 @@ def __init__(self, node, globals=None, owner=None, is_class=False, cell=None): self.__cell__ = cell self.__owner__ = owner - def __convert(self, other): + def __convert_to_ast(self, other): if isinstance(other, _Stringifier): + if isinstance(other.__ast_node__, str): + return ast.Name(id=other.__ast_node__) return other.__ast_node__ elif isinstance(other, slice): return ast.Slice( - lower=self.__convert(other.start) if other.start is not None else None, - upper=self.__convert(other.stop) if other.stop is not None else None, - step=self.__convert(other.step) if other.step is not None else None, + lower=( + self.__convert_to_ast(other.start) + if other.start is not None + else None + ), + upper=( + self.__convert_to_ast(other.stop) + if other.stop is not None + else None + ), + step=( + self.__convert_to_ast(other.step) + if other.step is not None + else None + ), ) else: return ast.Constant(value=other) + def __get_ast(self): + node = self.__ast_node__ + if isinstance(node, str): + return ast.Name(id=node) + return node + def __make_new(self, node): return _Stringifier( node, self.__globals__, self.__owner__, self.__forward_is_class__ @@ -292,38 +329,37 @@ def __hash__(self): def __getitem__(self, other): # Special case, to avoid stringifying references to class-scoped variables # as '__classdict__["x"]'. - if ( - isinstance(self.__ast_node__, ast.Name) - and self.__ast_node__.id == "__classdict__" - ): + if self.__ast_node__ == "__classdict__": raise KeyError if isinstance(other, tuple): - elts = [self.__convert(elt) for elt in other] + elts = [self.__convert_to_ast(elt) for elt in other] other = ast.Tuple(elts) else: - other = self.__convert(other) + other = self.__convert_to_ast(other) assert isinstance(other, ast.AST), repr(other) - return self.__make_new(ast.Subscript(self.__ast_node__, other)) + return self.__make_new(ast.Subscript(self.__get_ast(), other)) def __getattr__(self, attr): - return self.__make_new(ast.Attribute(self.__ast_node__, attr)) + return self.__make_new(ast.Attribute(self.__get_ast(), attr)) def __call__(self, *args, **kwargs): return self.__make_new( ast.Call( - self.__ast_node__, - [self.__convert(arg) for arg in args], + self.__get_ast(), + [self.__convert_to_ast(arg) for arg in args], [ - ast.keyword(key, self.__convert(value)) + ast.keyword(key, self.__convert_to_ast(value)) for key, value in kwargs.items() ], ) ) def __iter__(self): - yield self.__make_new(ast.Starred(self.__ast_node__)) + yield self.__make_new(ast.Starred(self.__get_ast())) def __repr__(self): + if isinstance(self.__ast_node__, str): + return self.__ast_node__ return ast.unparse(self.__ast_node__) def __format__(self, format_spec): @@ -332,7 +368,7 @@ def __format__(self, format_spec): def _make_binop(op: ast.AST): def binop(self, other): return self.__make_new( - ast.BinOp(self.__ast_node__, op, self.__convert(other)) + ast.BinOp(self.__get_ast(), op, self.__convert_to_ast(other)) ) return binop @@ -356,7 +392,7 @@ def binop(self, other): def _make_rbinop(op: ast.AST): def rbinop(self, other): return self.__make_new( - ast.BinOp(self.__convert(other), op, self.__ast_node__) + ast.BinOp(self.__convert_to_ast(other), op, self.__get_ast()) ) return rbinop @@ -381,9 +417,9 @@ def _make_compare(op): def compare(self, other): return self.__make_new( ast.Compare( - left=self.__ast_node__, + left=self.__get_ast(), ops=[op], - 
comparators=[self.__convert(other)], + comparators=[self.__convert_to_ast(other)], ) ) @@ -400,7 +436,7 @@ def compare(self, other): def _make_unary_op(op): def unary_op(self): - return self.__make_new(ast.UnaryOp(op, self.__ast_node__)) + return self.__make_new(ast.UnaryOp(op, self.__get_ast())) return unary_op @@ -422,7 +458,7 @@ def __init__(self, namespace, globals=None, owner=None, is_class=False): def __missing__(self, key): fwdref = _Stringifier( - ast.Name(id=key), + key, globals=self.globals, owner=self.owner, is_class=self.is_class, @@ -445,7 +481,7 @@ def call_annotate_function(annotate, format, *, owner=None, _is_evaluate=False): can be called with any of the format arguments in the Format enum, but compiler-generated __annotate__ functions only support the VALUE format. This function provides additional functionality to call __annotate__ - functions with the FORWARDREF and SOURCE formats. + functions with the FORWARDREF and STRING formats. *annotate* must be an __annotate__ function, which takes a single argument and returns a dict of annotations. @@ -463,8 +499,8 @@ def call_annotate_function(annotate, format, *, owner=None, _is_evaluate=False): return annotate(format) except NotImplementedError: pass - if format == Format.SOURCE: - # SOURCE is implemented by calling the annotate function in a special + if format == Format.STRING: + # STRING is implemented by calling the annotate function in a special # environment where every name lookup results in an instance of _Stringifier. # _Stringifier supports every dunder operation and returns a new _Stringifier. # At the end, we get a dictionary that mostly contains _Stringifier objects (or @@ -480,7 +516,7 @@ def call_annotate_function(annotate, format, *, owner=None, _is_evaluate=False): name = freevars[i] else: name = "__cell__" - fwdref = _Stringifier(ast.Name(id=name)) + fwdref = _Stringifier(name) new_closure.append(types.CellType(fwdref)) closure = tuple(new_closure) else: @@ -500,9 +536,9 @@ def call_annotate_function(annotate, format, *, owner=None, _is_evaluate=False): for key, val in annos.items() } elif format == Format.FORWARDREF: - # FORWARDREF is implemented similarly to SOURCE, but there are two changes, + # FORWARDREF is implemented similarly to STRING, but there are two changes, # at the beginning and the end of the process. - # First, while SOURCE uses an empty dictionary as the namespace, so that all + # First, while STRING uses an empty dictionary as the namespace, so that all # name lookups result in _Stringifier objects, FORWARDREF uses the globals # and builtins, so that defined names map to their real values. 
# Second, instead of returning strings, we want to return either real values @@ -532,7 +568,7 @@ def call_annotate_function(annotate, format, *, owner=None, _is_evaluate=False): else: name = "__cell__" fwdref = _Stringifier( - ast.Name(id=name), + name, cell=cell, owner=owner, globals=annotate.__globals__, @@ -555,6 +591,9 @@ def call_annotate_function(annotate, format, *, owner=None, _is_evaluate=False): result = func(Format.VALUE) for obj in globals.stringifiers: obj.__class__ = ForwardRef + if isinstance(obj.__ast_node__, str): + obj.__arg__ = obj.__ast_node__ + obj.__ast_node__ = None return result elif format == Format.VALUE: # Should be impossible because __annotate__ functions must not raise @@ -639,28 +678,38 @@ def get_annotations( if eval_str and format != Format.VALUE: raise ValueError("eval_str=True is only supported with format=Format.VALUE") - # For VALUE format, we look at __annotations__ directly. - if format != Format.VALUE: - annotate = get_annotate_function(obj) - if annotate is not None: - ann = call_annotate_function(annotate, format, owner=obj) - if not isinstance(ann, dict): - raise ValueError(f"{obj!r}.__annotate__ returned a non-dict") - return dict(ann) - - if isinstance(obj, type): - try: - ann = _BASE_GET_ANNOTATIONS(obj) - except AttributeError: - # For static types, the descriptor raises AttributeError. - return {} - else: - ann = getattr(obj, "__annotations__", None) - if ann is None: - return {} - - if not isinstance(ann, dict): - raise ValueError(f"{obj!r}.__annotations__ is neither a dict nor None") + match format: + case Format.VALUE: + # For VALUE, we only look at __annotations__ + ann = _get_dunder_annotations(obj) + case Format.FORWARDREF: + # For FORWARDREF, we use __annotations__ if it exists + try: + ann = _get_dunder_annotations(obj) + except NameError: + pass + else: + return dict(ann) + + # But if __annotations__ threw a NameError, we try calling __annotate__ + ann = _get_and_call_annotate(obj, format) + if ann is not None: + return ann + + # If that didn't work either, we have a very weird object: evaluating + # __annotations__ threw NameError and there is no __annotate__. In that case, + # we fall back to trying __annotations__ again. + return dict(_get_dunder_annotations(obj)) + case Format.STRING: + # For STRING, we try to call __annotate__ + ann = _get_and_call_annotate(obj, format) + if ann is not None: + return ann + # But if we didn't get it, we use __annotations__ instead. + ann = _get_dunder_annotations(obj) + return annotations_to_string(ann) + case _: + raise ValueError(f"Unsupported format {format!r}") if not ann: return {} @@ -725,3 +774,57 @@ def get_annotations( for key, value in ann.items() } return return_value + + +def value_to_string(value): + """Convert a Python value to a format suitable for use with the STRING format. + + This is inteded as a helper for tools that support the STRING format but do + not have access to the code that originally produced the annotations. It uses + repr() for most objects. + + """ + if isinstance(value, type): + if value.__module__ == "builtins": + return value.__qualname__ + return f"{value.__module__}.{value.__qualname__}" + if value is ...: + return "..." 
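# Illustrative sketch, assuming the annotationlib changes above: Format.SOURCE
# is now spelled Format.STRING, and annotations_to_string()/value_to_string()
# produce the same stringified view for annotation dicts that already hold
# real values.
from annotationlib import Format, get_annotations, annotations_to_string

def f(x: int, y: "Undefined") -> None: ...

get_annotations(f, format=Format.STRING)
# -> {'x': 'int', 'y': 'Undefined', 'return': 'None'}

annotations_to_string({"x": int, "y": list[int]})
# -> {'x': 'int', 'y': 'list[int]'}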
+ if isinstance(value, (types.FunctionType, types.BuiltinFunctionType)): + return value.__name__ + return repr(value) + + +def annotations_to_string(annotations): + """Convert an annotation dict containing values to approximately the STRING format.""" + return { + n: t if isinstance(t, str) else value_to_string(t) + for n, t in annotations.items() + } + + +def _get_and_call_annotate(obj, format): + annotate = get_annotate_function(obj) + if annotate is not None: + ann = call_annotate_function(annotate, format, owner=obj) + if not isinstance(ann, dict): + raise ValueError(f"{obj!r}.__annotate__ returned a non-dict") + return dict(ann) + return None + + +def _get_dunder_annotations(obj): + if isinstance(obj, type): + try: + ann = _BASE_GET_ANNOTATIONS(obj) + except AttributeError: + # For static types, the descriptor raises AttributeError. + return {} + else: + ann = getattr(obj, "__annotations__", None) + if ann is None: + return {} + + if not isinstance(ann, dict): + raise ValueError(f"{obj!r}.__annotations__ is neither a dict nor None") + return dict(ann) diff --git a/Lib/argparse.py b/Lib/argparse.py index 694c46db61d177..874f271959c4fe 100644 --- a/Lib/argparse.py +++ b/Lib/argparse.py @@ -1224,7 +1224,8 @@ def __call__(self, parser, namespace, values, option_string=None): setattr(namespace, key, value) if arg_strings: - vars(namespace).setdefault(_UNRECOGNIZED_ARGS_ATTR, []) + if not hasattr(namespace, _UNRECOGNIZED_ARGS_ATTR): + setattr(namespace, _UNRECOGNIZED_ARGS_ATTR, []) getattr(namespace, _UNRECOGNIZED_ARGS_ATTR).extend(arg_strings) class _ExtendAction(_AppendAction): @@ -1804,8 +1805,8 @@ def add_subparsers(self, **kwargs): kwargs.setdefault('parser_class', type(self)) if 'title' in kwargs or 'description' in kwargs: - title = _(kwargs.pop('title', 'subcommands')) - description = _(kwargs.pop('description', None)) + title = kwargs.pop('title', _('subcommands')) + description = kwargs.pop('description', None) self._subparsers = self.add_argument_group(title, description) else: self._subparsers = self._positionals @@ -1927,11 +1928,11 @@ def _parse_known_args(self, arg_strings, namespace): # otherwise, add the arg to the arg strings # and note the index if it was an option else: - option_tuple = self._parse_optional(arg_string) - if option_tuple is None: + option_tuples = self._parse_optional(arg_string) + if option_tuples is None: pattern = 'A' else: - option_string_indices[i] = option_tuple + option_string_indices[i] = option_tuples pattern = 'O' arg_string_pattern_parts.append(pattern) @@ -1966,8 +1967,16 @@ def take_action(action, argument_strings, option_string=None): def consume_optional(start_index): # get the optional identified at this index - option_tuple = option_string_indices[start_index] - action, option_string, sep, explicit_arg = option_tuple + option_tuples = option_string_indices[start_index] + # if multiple actions match, the option string was ambiguous + if len(option_tuples) > 1: + options = ', '.join([option_string + for action, option_string, sep, explicit_arg in option_tuples]) + args = {'option': arg_string, 'matches': options} + msg = _('ambiguous option: %(option)s could match %(matches)s') + raise ArgumentError(None, msg % args) + + action, option_string, sep, explicit_arg = option_tuples[0] # identify additional optionals in the same arg string # (e.g. 
-xyz is the same as -x -y -z if no args are required) @@ -2253,7 +2262,7 @@ def _parse_optional(self, arg_string): # if the option string is present in the parser, return the action if arg_string in self._option_string_actions: action = self._option_string_actions[arg_string] - return action, arg_string, None, None + return [(action, arg_string, None, None)] # if it's just a single character, it was meant to be positional if len(arg_string) == 1: @@ -2263,25 +2272,14 @@ def _parse_optional(self, arg_string): option_string, sep, explicit_arg = arg_string.partition('=') if sep and option_string in self._option_string_actions: action = self._option_string_actions[option_string] - return action, option_string, sep, explicit_arg + return [(action, option_string, sep, explicit_arg)] # search through all possible prefixes of the option string # and all actions in the parser for possible interpretations option_tuples = self._get_option_tuples(arg_string) - # if multiple actions match, the option string was ambiguous - if len(option_tuples) > 1: - options = ', '.join([option_string - for action, option_string, sep, explicit_arg in option_tuples]) - args = {'option': arg_string, 'matches': options} - msg = _('ambiguous option: %(option)s could match %(matches)s') - raise ArgumentError(None, msg % args) - - # if exactly one action matched, this segmentation is good, - # so return the parsed action - elif len(option_tuples) == 1: - option_tuple, = option_tuples - return option_tuple + if option_tuples: + return option_tuples # if it was not found as an option, but it looks like a negative # number, it was meant to be positional @@ -2296,7 +2294,7 @@ def _parse_optional(self, arg_string): # it was meant to be an optional but there is no such option # in this parser (though it might be a valid option in a subparser) - return None, arg_string, None, None + return [(None, arg_string, None, None)] def _get_option_tuples(self, option_string): result = [] @@ -2319,7 +2317,9 @@ def _get_option_tuples(self, option_string): # but multiple character options always have to have their argument # separate elif option_string[0] in chars and option_string[1] not in chars: - option_prefix = option_string + option_prefix, sep, explicit_arg = option_string.partition('=') + if not sep: + sep = explicit_arg = None short_option_prefix = option_string[:2] short_explicit_arg = option_string[2:] @@ -2328,9 +2328,9 @@ def _get_option_tuples(self, option_string): action = self._option_string_actions[option_string] tup = action, option_string, '', short_explicit_arg result.append(tup) - elif option_string.startswith(option_prefix): + elif self.allow_abbrev and option_string.startswith(option_prefix): action = self._option_string_actions[option_string] - tup = action, option_string, None, None + tup = action, option_string, sep, explicit_arg result.append(tup) # shouldn't ever get here @@ -2344,43 +2344,40 @@ def _get_nargs_pattern(self, action): # in all examples below, we have to allow for '--' args # which are represented as '-' in the pattern nargs = action.nargs + # if this is an optional action, -- is not allowed + option = action.option_strings # the default (None) is assumed to be a single argument if nargs is None: - nargs_pattern = '(-*A-*)' + nargs_pattern = '([A])' if option else '(-*A-*)' # allow zero or one arguments elif nargs == OPTIONAL: - nargs_pattern = '(-*A?-*)' + nargs_pattern = '(A?)' if option else '(-*A?-*)' # allow zero or more arguments elif nargs == ZERO_OR_MORE: - nargs_pattern = '(-*[A-]*)' + 
nargs_pattern = '(A*)' if option else '(-*[A-]*)' # allow one or more arguments elif nargs == ONE_OR_MORE: - nargs_pattern = '(-*A[A-]*)' + nargs_pattern = '(A+)' if option else '(-*A[A-]*)' # allow any number of options or arguments elif nargs == REMAINDER: - nargs_pattern = '([-AO]*)' + nargs_pattern = '([AO]*)' if option else '(.*)' # allow one argument followed by any number of options or arguments elif nargs == PARSER: - nargs_pattern = '(-*A[-AO]*)' + nargs_pattern = '(A[AO]*)' if option else '(-*A[-AO]*)' # suppress action, like nargs=0 elif nargs == SUPPRESS: - nargs_pattern = '(-*-*)' + nargs_pattern = '()' if option else '(-*)' # all others should be integers else: - nargs_pattern = '(-*%s-*)' % '-*'.join('A' * nargs) - - # if this is an optional action, -- is not allowed - if action.option_strings: - nargs_pattern = nargs_pattern.replace('-*', '') - nargs_pattern = nargs_pattern.replace('-', '') + nargs_pattern = '([AO]{%d})' % nargs if option else '((?:-*A){%d}-*)' % nargs # return the pattern return nargs_pattern @@ -2483,9 +2480,8 @@ def _get_values(self, action, arg_strings): value = action.const else: value = action.default - if isinstance(value, str): + if isinstance(value, str) and value is not SUPPRESS: value = self._get_value(action, value) - self._check_value(action, value) # when nargs='*' on a positional, if there were no command-line # args, use the default if it is anything other than None @@ -2493,11 +2489,8 @@ def _get_values(self, action, arg_strings): not action.option_strings): if action.default is not None: value = action.default - self._check_value(action, value) else: - # since arg_strings is always [] at this point - # there is no need to use self._check_value(action, value) - value = arg_strings + value = [] # single argument or optional argument produces a single value elif len(arg_strings) == 1 and action.nargs in [None, OPTIONAL]: @@ -2554,11 +2547,15 @@ def _get_value(self, action, arg_string): def _check_value(self, action, value): # converted value must be one of the choices (if specified) - if action.choices is not None and value not in action.choices: - args = {'value': value, - 'choices': ', '.join(map(repr, action.choices))} - msg = _('invalid choice: %(value)r (choose from %(choices)s)') - raise ArgumentError(action, msg % args) + choices = action.choices + if choices is not None: + if isinstance(choices, str): + choices = iter(choices) + if value not in choices: + args = {'value': value, + 'choices': ', '.join(map(repr, action.choices))} + msg = _('invalid choice: %(value)r (choose from %(choices)s)') + raise ArgumentError(action, msg % args) # ======================= # Help-formatting methods diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py index 000647f57dd9e3..ffcc0174e1e245 100644 --- a/Lib/asyncio/base_events.py +++ b/Lib/asyncio/base_events.py @@ -1144,7 +1144,7 @@ async def create_connection( (functools.partial(self._connect_sock, exceptions, addrinfo, laddr_infos) for addrinfo in infos), - happy_eyeballs_delay, loop=self) + happy_eyeballs_delay) if sock is None: exceptions = [exc for sub in exceptions for exc in sub] diff --git a/Lib/asyncio/runners.py b/Lib/asyncio/runners.py index 1b89236599aad7..0e63c34f60f4d9 100644 --- a/Lib/asyncio/runners.py +++ b/Lib/asyncio/runners.py @@ -3,6 +3,7 @@ import contextvars import enum import functools +import inspect import threading import signal from . 
import coroutines @@ -84,10 +85,7 @@ def get_loop(self): return self._loop def run(self, coro, *, context=None): - """Run a coroutine inside the embedded event loop.""" - if not coroutines.iscoroutine(coro): - raise ValueError("a coroutine was expected, got {!r}".format(coro)) - + """Run code in the embedded event loop.""" if events._get_running_loop() is not None: # fail fast with short traceback raise RuntimeError( @@ -95,8 +93,19 @@ def run(self, coro, *, context=None): self._lazy_init() + if not coroutines.iscoroutine(coro): + if inspect.isawaitable(coro): + async def _wrap_awaitable(awaitable): + return await awaitable + + coro = _wrap_awaitable(coro) + else: + raise TypeError('An asyncio.Future, a coroutine or an ' + 'awaitable is required') + if context is None: context = self._context + task = self._loop.create_task(coro, context=context) if (threading.current_thread() is threading.main_thread() diff --git a/Lib/asyncio/staggered.py b/Lib/asyncio/staggered.py index c3a7441a7b091d..6ccf5c3c269ff0 100644 --- a/Lib/asyncio/staggered.py +++ b/Lib/asyncio/staggered.py @@ -4,11 +4,12 @@ import contextlib -from . import events -from . import exceptions as exceptions_mod from . import locks from . import tasks +from . import taskgroups +class _Done(Exception): + pass async def staggered_race(coro_fns, delay, *, loop=None): """Run coroutines with staggered start times and take the first to finish. @@ -42,8 +43,6 @@ async def staggered_race(coro_fns, delay, *, loop=None): delay: amount of time, in seconds, between starting coroutines. If ``None``, the coroutines will run sequentially. - loop: the event loop to use. - Returns: tuple *(winner_result, winner_index, exceptions)* where @@ -62,36 +61,11 @@ async def staggered_race(coro_fns, delay, *, loop=None): """ # TODO: when we have aiter() and anext(), allow async iterables in coro_fns. - loop = loop or events.get_running_loop() - enum_coro_fns = enumerate(coro_fns) winner_result = None winner_index = None exceptions = [] - running_tasks = [] - - async def run_one_coro(previous_failed) -> None: - # Wait for the previous task to finish, or for delay seconds - if previous_failed is not None: - with contextlib.suppress(exceptions_mod.TimeoutError): - # Use asyncio.wait_for() instead of asyncio.wait() here, so - # that if we get cancelled at this point, Event.wait() is also - # cancelled, otherwise there will be a "Task destroyed but it is - # pending" later. - await tasks.wait_for(previous_failed.wait(), delay) - # Get the next coroutine to run - try: - this_index, coro_fn = next(enum_coro_fns) - except StopIteration: - return - # Start task that will run the next coroutine - this_failed = locks.Event() - next_task = loop.create_task(run_one_coro(this_failed)) - running_tasks.append(next_task) - assert len(running_tasks) == this_index + 2 - # Prepare place to put this coroutine's exceptions if not won - exceptions.append(None) - assert len(exceptions) == this_index + 1 + async def run_one_coro(this_index, coro_fn, this_failed): try: result = await coro_fn() except (SystemExit, KeyboardInterrupt): @@ -105,34 +79,23 @@ async def run_one_coro(previous_failed) -> None: assert winner_index is None winner_index = this_index winner_result = result - # Cancel all other tasks. We take care to not cancel the current - # task as well. 
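# Illustrative sketch, assuming the Runner.run() change above: any awaitable
# is now wrapped into a coroutine before being scheduled, instead of being
# rejected outright.
import asyncio

class Ready:
    """A plain awaitable (not a coroutine) that resolves immediately."""
    def __await__(self):
        return "done"
        yield  # unreachable; only makes __await__ a generator function

with asyncio.Runner() as runner:
    print(runner.run(Ready()))   # done
    # runner.run(42) now raises TypeError rather than ValueError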
If we do so, then since there is no `await` after - # here and CancelledError are usually thrown at one, we will - # encounter a curious corner case where the current task will end - # up as done() == True, cancelled() == False, exception() == - # asyncio.CancelledError. This behavior is specified in - # https://bugs.python.org/issue30048 - for i, t in enumerate(running_tasks): - if i != this_index: - t.cancel() - - first_task = loop.create_task(run_one_coro(None)) - running_tasks.append(first_task) + raise _Done + try: - # Wait for a growing list of tasks to all finish: poor man's version of - # curio's TaskGroup or trio's nursery - done_count = 0 - while done_count != len(running_tasks): - done, _ = await tasks.wait(running_tasks) - done_count = len(done) - # If run_one_coro raises an unhandled exception, it's probably a - # programming error, and I want to see it. - if __debug__: - for d in done: - if d.done() and not d.cancelled() and d.exception(): - raise d.exception() - return winner_result, winner_index, exceptions - finally: - # Make sure no tasks are left running if we leave this function - for t in running_tasks: - t.cancel() + tg = taskgroups.TaskGroup() + # Intentionally override the loop in the TaskGroup to avoid + # using the running loop, preserving backwards compatibility + # TaskGroup only starts using `_loop` after `__aenter__` + # so overriding it here is safe. + tg._loop = loop + async with tg: + for this_index, coro_fn in enumerate(coro_fns): + this_failed = locks.Event() + exceptions.append(None) + tg.create_task(run_one_coro(this_index, coro_fn, this_failed)) + with contextlib.suppress(TimeoutError): + await tasks.wait_for(this_failed.wait(), delay) + except* _Done: + pass + + return winner_result, winner_index, exceptions diff --git a/Lib/collections/abc.py b/Lib/collections/abc.py index bff76291634604..034ba377a0dbec 100644 --- a/Lib/collections/abc.py +++ b/Lib/collections/abc.py @@ -1,3 +1,3 @@ -from _collections_abc import * -from _collections_abc import __all__ # noqa: F401 -from _collections_abc import _CallableGenericAlias # noqa: F401 +import _collections_abc +import sys +sys.modules[__name__] = _collections_abc diff --git a/Lib/dataclasses.py b/Lib/dataclasses.py index ac7d40cf2cac2e..bdda7cc6c00f5d 100644 --- a/Lib/dataclasses.py +++ b/Lib/dataclasses.py @@ -283,11 +283,12 @@ class Field: 'compare', 'metadata', 'kw_only', + 'doc', '_field_type', # Private: not to be used by user code. ) def __init__(self, default, default_factory, init, repr, hash, compare, - metadata, kw_only): + metadata, kw_only, doc): self.name = None self.type = None self.default = default @@ -300,6 +301,7 @@ def __init__(self, default, default_factory, init, repr, hash, compare, if metadata is None else types.MappingProxyType(metadata)) self.kw_only = kw_only + self.doc = doc self._field_type = None @recursive_repr() @@ -315,6 +317,7 @@ def __repr__(self): f'compare={self.compare!r},' f'metadata={self.metadata!r},' f'kw_only={self.kw_only!r},' + f'doc={self.doc!r},' f'_field_type={self._field_type}' ')') @@ -382,7 +385,7 @@ def __repr__(self): # so that a type checker can be told (via overloads) that this is a # function whose type depends on its parameters. def field(*, default=MISSING, default_factory=MISSING, init=True, repr=True, - hash=None, compare=True, metadata=None, kw_only=MISSING): + hash=None, compare=True, metadata=None, kw_only=MISSING, doc=None): """Return an object to identify dataclass fields. default is the default value of the field. 
default_factory is a @@ -394,7 +397,7 @@ def field(*, default=MISSING, default_factory=MISSING, init=True, repr=True, comparison functions. metadata, if specified, must be a mapping which is stored but not otherwise examined by dataclass. If kw_only is true, the field will become a keyword-only parameter to - __init__(). + __init__(). doc is an optional docstring for this field. It is an error to specify both default and default_factory. """ @@ -402,7 +405,7 @@ def field(*, default=MISSING, default_factory=MISSING, init=True, repr=True, if default is not MISSING and default_factory is not MISSING: raise ValueError('cannot specify both default and default_factory') return Field(default, default_factory, init, repr, hash, compare, - metadata, kw_only) + metadata, kw_only, doc) def _fields_in_init_order(fields): @@ -690,11 +693,8 @@ def _frozen_get_del_attr(cls, fields, func_builder): def _is_classvar(a_type, typing): - # This test uses a typing internal class, but it's the best way to - # test if this is a ClassVar. return (a_type is typing.ClassVar - or (type(a_type) is typing._GenericAlias - and a_type.__origin__ is typing.ClassVar)) + or (typing.get_origin(a_type) is typing.ClassVar)) def _is_initvar(a_type, dataclasses): @@ -1177,7 +1177,7 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen, if weakref_slot and not slots: raise TypeError('weakref_slot is True but slots is False') if slots: - cls = _add_slots(cls, frozen, weakref_slot) + cls = _add_slots(cls, frozen, weakref_slot, fields) abc.update_abstractmethods(cls) @@ -1221,9 +1221,56 @@ def _get_slots(cls): raise TypeError(f"Slots of '{cls.__name__}' cannot be determined") -def _add_slots(cls, is_frozen, weakref_slot): - # Need to create a new class, since we can't set __slots__ - # after a class has been created. +def _update_func_cell_for__class__(f, oldcls, newcls): + # Returns True if we update a cell, else False. + if f is None: + # f will be None in the case of a property where not all of + # fget, fset, and fdel are used. Nothing to do in that case. + return False + try: + idx = f.__code__.co_freevars.index("__class__") + except ValueError: + # This function doesn't reference __class__, so nothing to do. + return False + # Fix the cell to point to the new class, if it's already pointing + # at the old class. I'm not convinced that the "is oldcls" test + # is needed, but other than performance can't hurt. + closure = f.__closure__[idx] + if closure.cell_contents is oldcls: + closure.cell_contents = newcls + return True + return False + + +def _create_slots(defined_fields, inherited_slots, field_names, weakref_slot): + # The slots for our class. Remove slots from our base classes. Add + # '__weakref__' if weakref_slot was given, unless it is already present. + seen_docs = False + slots = {} + for slot in itertools.filterfalse( + inherited_slots.__contains__, + itertools.chain( + # gh-93521: '__weakref__' also needs to be filtered out if + # already present in inherited_slots + field_names, ('__weakref__',) if weakref_slot else () + ) + ): + doc = getattr(defined_fields.get(slot), 'doc', None) + if doc is not None: + seen_docs = True + slots.update({slot: doc}) + + # We only return dict if there's at least one doc member, + # otherwise we return tuple, which is the old default format. 
+ if seen_docs: + return slots + return tuple(slots) + + +def _add_slots(cls, is_frozen, weakref_slot, defined_fields): + # Need to create a new class, since we can't set __slots__ after a + # class has been created, and the @dataclass decorator is called + # after the class is created. # Make sure __slots__ isn't already set. if '__slots__' in cls.__dict__: @@ -1236,17 +1283,9 @@ def _add_slots(cls, is_frozen, weakref_slot): inherited_slots = set( itertools.chain.from_iterable(map(_get_slots, cls.__mro__[1:-1])) ) - # The slots for our class. Remove slots from our base classes. Add - # '__weakref__' if weakref_slot was given, unless it is already present. - cls_dict["__slots__"] = tuple( - itertools.filterfalse( - inherited_slots.__contains__, - itertools.chain( - # gh-93521: '__weakref__' also needs to be filtered out if - # already present in inherited_slots - field_names, ('__weakref__',) if weakref_slot else () - ) - ), + + cls_dict["__slots__"] = _create_slots( + defined_fields, inherited_slots, field_names, weakref_slot, ) for field_name in field_names: @@ -1262,18 +1301,37 @@ def _add_slots(cls, is_frozen, weakref_slot): # And finally create the class. qualname = getattr(cls, '__qualname__', None) - cls = type(cls)(cls.__name__, cls.__bases__, cls_dict) + newcls = type(cls)(cls.__name__, cls.__bases__, cls_dict) if qualname is not None: - cls.__qualname__ = qualname + newcls.__qualname__ = qualname if is_frozen: # Need this for pickling frozen classes with slots. if '__getstate__' not in cls_dict: - cls.__getstate__ = _dataclass_getstate + newcls.__getstate__ = _dataclass_getstate if '__setstate__' not in cls_dict: - cls.__setstate__ = _dataclass_setstate - - return cls + newcls.__setstate__ = _dataclass_setstate + + # Fix up any closures which reference __class__. This is used to + # fix zero argument super so that it points to the correct class + # (the newly created one, which we're returning) and not the + # original class. We can break out of this loop as soon as we + # make an update, since all closures for a class will share a + # given cell. + for member in newcls.__dict__.values(): + # If this is a wrapped function, unwrap it. 
+ member = inspect.unwrap(member) + + if isinstance(member, types.FunctionType): + if _update_func_cell_for__class__(member, cls, newcls): + break + elif isinstance(member, property): + if (_update_func_cell_for__class__(member.fget, cls, newcls) + or _update_func_cell_for__class__(member.fset, cls, newcls) + or _update_func_cell_for__class__(member.fdel, cls, newcls)): + break + + return newcls def dataclass(cls=None, /, *, init=True, repr=True, eq=True, order=False, diff --git a/Lib/decimal.py b/Lib/decimal.py index f8c548eb1c6ecf..530bdfb38953d9 100644 --- a/Lib/decimal.py +++ b/Lib/decimal.py @@ -103,6 +103,7 @@ from _decimal import __version__ # noqa: F401 from _decimal import __libmpdec_version__ # noqa: F401 except ImportError: - from _pydecimal import * - from _pydecimal import __version__ # noqa: F401 - from _pydecimal import __libmpdec_version__ # noqa: F401 + import _pydecimal + import sys + _pydecimal.__doc__ = __doc__ + sys.modules[__name__] = _pydecimal diff --git a/Lib/dis.py b/Lib/dis.py index f8832b30497822..e87e6a78469ab0 100644 --- a/Lib/dis.py +++ b/Lib/dis.py @@ -32,7 +32,7 @@ CONVERT_VALUE = opmap['CONVERT_VALUE'] SET_FUNCTION_ATTRIBUTE = opmap['SET_FUNCTION_ATTRIBUTE'] -FUNCTION_ATTR_FLAGS = ('defaults', 'kwdefaults', 'annotations', 'closure') +FUNCTION_ATTR_FLAGS = ('defaults', 'kwdefaults', 'annotations', 'closure', 'annotate') ENTER_EXECUTOR = opmap['ENTER_EXECUTOR'] LOAD_CONST = opmap['LOAD_CONST'] diff --git a/Lib/doctest.py b/Lib/doctest.py index ea7d275c91db04..bb281fc483c41c 100644 --- a/Lib/doctest.py +++ b/Lib/doctest.py @@ -389,11 +389,11 @@ def __init__(self, out): # still use input() to get user input self.use_rawinput = 1 - def set_trace(self, frame=None): + def set_trace(self, frame=None, *, commands=None): self.__debugger_used = True if frame is None: frame = sys._getframe().f_back - pdb.Pdb.set_trace(self, frame) + pdb.Pdb.set_trace(self, frame, commands=commands) def set_continue(self): # Calling set_continue unconditionally would break unit test diff --git a/Lib/functools.py b/Lib/functools.py index 49ea9a2f6999f5..9d53d3601559b2 100644 --- a/Lib/functools.py +++ b/Lib/functools.py @@ -6,24 +6,22 @@ # Written by Nick Coghlan , # Raymond Hettinger , # and Łukasz Langa . -# Copyright (C) 2006-2013 Python Software Foundation. +# Copyright (C) 2006-2024 Python Software Foundation. 
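# Illustrative sketch, assuming the dataclasses changes above: field() takes a
# doc parameter, and with slots=True the documented fields turn __slots__ into
# a {name: doc} mapping (it stays a plain tuple when no field has a doc).
from dataclasses import dataclass, field

@dataclass(slots=True)
class Point:
    x: int = field(doc="abscissa")
    y: int = field(doc="ordinate")

print(Point.__slots__)   # {'x': 'abscissa', 'y': 'ordinate'}
print(Point(1, 2))       # Point(x=1, y=2)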
# See C source code for _functools credits/copyright __all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES', 'total_ordering', 'cache', 'cmp_to_key', 'lru_cache', 'reduce', 'partial', 'partialmethod', 'singledispatch', 'singledispatchmethod', - 'cached_property'] + 'cached_property', 'Placeholder'] from abc import get_cache_token from collections import namedtuple -# import types, weakref # Deferred to single_dispatch() +# import weakref # Deferred to single_dispatch() +from operator import itemgetter from reprlib import recursive_repr -from types import MethodType +from types import GenericAlias, MethodType, MappingProxyType, UnionType from _thread import RLock -# Avoid importing types, so we can speedup import time -GenericAlias = type(list[int]) - ################################################################################ ### update_wrapper() and wraps() decorator ################################################################################ @@ -274,43 +272,125 @@ def reduce(function, sequence, initial=_initial_missing): ### partial() argument application ################################################################################ -# Purely functional, no descriptor behaviour -class partial: - """New function with partial application of the given arguments - and keywords. + +class _PlaceholderType: + """The type of the Placeholder singleton. + + Used as a placeholder for partial arguments. """ + __instance = None + __slots__ = () + + def __init_subclass__(cls, *args, **kwargs): + raise TypeError(f"type '{cls.__name__}' is not an acceptable base type") + + def __new__(cls): + if cls.__instance is None: + cls.__instance = object.__new__(cls) + return cls.__instance - __slots__ = "func", "args", "keywords", "__dict__", "__weakref__" + def __repr__(self): + return 'Placeholder' - def __new__(cls, func, /, *args, **keywords): + def __reduce__(self): + return 'Placeholder' + +Placeholder = _PlaceholderType() + +def _partial_prepare_merger(args): + if not args: + return 0, None + nargs = len(args) + order = [] + j = nargs + for i, a in enumerate(args): + if a is Placeholder: + order.append(j) + j += 1 + else: + order.append(i) + phcount = j - nargs + merger = itemgetter(*order) if phcount else None + return phcount, merger + +def _partial_new(cls, func, /, *args, **keywords): + if issubclass(cls, partial): + base_cls = partial if not callable(func): raise TypeError("the first argument must be callable") + else: + base_cls = partialmethod + # func could be a descriptor like classmethod which isn't callable + if not callable(func) and not hasattr(func, "__get__"): + raise TypeError(f"the first argument {func!r} must be a callable " + "or a descriptor") + if args and args[-1] is Placeholder: + raise TypeError("trailing Placeholders are not allowed") + if isinstance(func, base_cls): + pto_phcount = func._phcount + tot_args = func.args + if args: + tot_args += args + if pto_phcount: + # merge args with args of `func` which is `partial` + nargs = len(args) + if nargs < pto_phcount: + tot_args += (Placeholder,) * (pto_phcount - nargs) + tot_args = func._merger(tot_args) + if nargs > pto_phcount: + tot_args += args[pto_phcount:] + phcount, merger = _partial_prepare_merger(tot_args) + else: # works for both pto_phcount == 0 and != 0 + phcount, merger = pto_phcount, func._merger + keywords = {**func.keywords, **keywords} + func = func.func + else: + tot_args = args + phcount, merger = _partial_prepare_merger(tot_args) + + self = object.__new__(cls) + self.func = func + 
self.args = tot_args + self.keywords = keywords + self._phcount = phcount + self._merger = merger + return self + +def _partial_repr(self): + cls = type(self) + module = cls.__module__ + qualname = cls.__qualname__ + args = [repr(self.func)] + args.extend(map(repr, self.args)) + args.extend(f"{k}={v!r}" for k, v in self.keywords.items()) + return f"{module}.{qualname}({', '.join(args)})" - if isinstance(func, partial): - args = func.args + args - keywords = {**func.keywords, **keywords} - func = func.func +# Purely functional, no descriptor behaviour +class partial: + """New function with partial application of the given arguments + and keywords. + """ - self = super(partial, cls).__new__(cls) + __slots__ = ("func", "args", "keywords", "_phcount", "_merger", + "__dict__", "__weakref__") - self.func = func - self.args = args - self.keywords = keywords - return self + __new__ = _partial_new + __repr__ = recursive_repr()(_partial_repr) def __call__(self, /, *args, **keywords): + phcount = self._phcount + if phcount: + try: + pto_args = self._merger(self.args + args) + args = args[phcount:] + except IndexError: + raise TypeError("missing positional arguments " + "in 'partial' call; expected " + f"at least {phcount}, got {len(args)}") + else: + pto_args = self.args keywords = {**self.keywords, **keywords} - return self.func(*self.args, *args, **keywords) - - @recursive_repr() - def __repr__(self): - cls = type(self) - qualname = cls.__qualname__ - module = cls.__module__ - args = [repr(self.func)] - args.extend(repr(x) for x in self.args) - args.extend(f"{k}={v!r}" for (k, v) in self.keywords.items()) - return f"{module}.{qualname}({', '.join(args)})" + return self.func(*pto_args, *args, **keywords) def __get__(self, obj, objtype=None): if obj is None: @@ -332,6 +412,10 @@ def __setstate__(self, state): (namespace is not None and not isinstance(namespace, dict))): raise TypeError("invalid partial state") + if args and args[-1] is Placeholder: + raise TypeError("trailing Placeholders are not allowed") + phcount, merger = _partial_prepare_merger(args) + args = tuple(args) # just in case it's a subclass if kwds is None: kwds = {} @@ -344,53 +428,40 @@ def __setstate__(self, state): self.func = func self.args = args self.keywords = kwds + self._phcount = phcount + self._merger = merger try: - from _functools import partial + from _functools import partial, Placeholder, _PlaceholderType except ImportError: pass # Descriptor version -class partialmethod(object): +class partialmethod: """Method descriptor with partial application of the given arguments and keywords. Supports wrapping existing descriptors and handles non-descriptor callables as instance methods. 
""" - - def __init__(self, func, /, *args, **keywords): - if not callable(func) and not hasattr(func, "__get__"): - raise TypeError("{!r} is not callable or a descriptor" - .format(func)) - - # func could be a descriptor like classmethod which isn't callable, - # so we can't inherit from partial (it verifies func is callable) - if isinstance(func, partialmethod): - # flattening is mandatory in order to place cls/self before all - # other arguments - # it's also more efficient since only one function will be called - self.func = func.func - self.args = func.args + args - self.keywords = {**func.keywords, **keywords} - else: - self.func = func - self.args = args - self.keywords = keywords - - def __repr__(self): - cls = type(self) - module = cls.__module__ - qualname = cls.__qualname__ - args = [repr(self.func)] - args.extend(map(repr, self.args)) - args.extend(f"{k}={v!r}" for k, v in self.keywords.items()) - return f"{module}.{qualname}({', '.join(args)})" + __new__ = _partial_new + __repr__ = _partial_repr def _make_unbound_method(self): def _method(cls_or_self, /, *args, **keywords): + phcount = self._phcount + if phcount: + try: + pto_args = self._merger(self.args + args) + args = args[phcount:] + except IndexError: + raise TypeError("missing positional arguments " + "in 'partialmethod' call; expected " + f"at least {phcount}, got {len(args)}") + else: + pto_args = self.args keywords = {**self.keywords, **keywords} - return self.func(cls_or_self, *self.args, *args, **keywords) + return self.func(cls_or_self, *pto_args, *args, **keywords) _method.__isabstractmethod__ = self.__isabstractmethod__ _method.__partialmethod__ = self return _method @@ -826,7 +897,7 @@ def singledispatch(func): # There are many programs that use functools without singledispatch, so we # trade-off making singledispatch marginally slower for the benefit of # making start-up of such applications slightly faster. 
- import types, weakref + import weakref registry = {} dispatch_cache = weakref.WeakKeyDictionary() @@ -857,7 +928,7 @@ def dispatch(cls): def _is_union_type(cls): from typing import get_origin, Union - return get_origin(cls) in {Union, types.UnionType} + return get_origin(cls) in {Union, UnionType} def _is_valid_dispatch_type(cls): if isinstance(cls, type): @@ -934,7 +1005,7 @@ def wrapper(*args, **kw): registry[object] = func wrapper.register = register wrapper.dispatch = dispatch - wrapper.registry = types.MappingProxyType(registry) + wrapper.registry = MappingProxyType(registry) wrapper._clear_cache = dispatch_cache.clear update_wrapper(wrapper, func) return wrapper diff --git a/Lib/inspect.py b/Lib/inspect.py index 90c44cf74007a8..17314564f35397 100644 --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -970,10 +970,12 @@ def findsource(object): if isclass(object): try: - firstlineno = vars(object)['__firstlineno__'] + lnum = vars(object)['__firstlineno__'] - 1 except (TypeError, KeyError): raise OSError('source code not available') - return lines, firstlineno - 1 + if lnum >= len(lines): + raise OSError('lineno is out of bounds') + return lines, lnum if ismethod(object): object = object.__func__ @@ -1930,7 +1932,12 @@ def _signature_get_partial(wrapped_sig, partial, extra_args=()): if param.kind is _POSITIONAL_ONLY: # If positional-only parameter is bound by partial, # it effectively disappears from the signature - new_params.pop(param_name) + # However, if it is a Placeholder it is not removed + # And also looses default value + if arg_value is functools.Placeholder: + new_params[param_name] = param.replace(default=_empty) + else: + new_params.pop(param_name) continue if param.kind is _POSITIONAL_OR_KEYWORD: @@ -1952,7 +1959,17 @@ def _signature_get_partial(wrapped_sig, partial, extra_args=()): new_params[param_name] = param.replace(default=arg_value) else: # was passed as a positional argument - new_params.pop(param.name) + # Do not pop if it is a Placeholder + # also change kind to positional only + # and remove default + if arg_value is functools.Placeholder: + new_param = param.replace( + kind=_POSITIONAL_ONLY, + default=_empty + ) + new_params[param_name] = new_param + else: + new_params.pop(param_name) continue if param.kind is _KEYWORD_ONLY: @@ -2446,6 +2463,11 @@ def _signature_from_callable(obj, *, sig_params = tuple(sig.parameters.values()) assert (not sig_params or first_wrapped_param is not sig_params[0]) + # If there were placeholders set, + # first param is transformed to positional only + if partialmethod.args.count(functools.Placeholder): + first_wrapped_param = first_wrapped_param.replace( + kind=Parameter.POSITIONAL_ONLY) new_params = (first_wrapped_param,) + sig_params return sig.replace(parameters=new_params) diff --git a/Lib/multiprocessing/context.py b/Lib/multiprocessing/context.py index ddcc7e7900999e..d0a3ad00e53ad8 100644 --- a/Lib/multiprocessing/context.py +++ b/Lib/multiprocessing/context.py @@ -259,13 +259,12 @@ def get_start_method(self, allow_none=False): def get_all_start_methods(self): """Returns a list of the supported start methods, default first.""" - if sys.platform == 'win32': - return ['spawn'] - else: - methods = ['spawn', 'fork'] if sys.platform == 'darwin' else ['fork', 'spawn'] - if reduction.HAVE_SEND_HANDLE: - methods.append('forkserver') - return methods + default = self._default_context.get_start_method() + start_method_names = [default] + start_method_names.extend( + name for name in _concrete_contexts if name != default + ) + return 
start_method_names # @@ -320,14 +319,15 @@ def _check_available(self): 'spawn': SpawnContext(), 'forkserver': ForkServerContext(), } - if sys.platform == 'darwin': - # bpo-33725: running arbitrary code after fork() is no longer reliable - # on macOS since macOS 10.14 (Mojave). Use spawn by default instead. - _default_context = DefaultContext(_concrete_contexts['spawn']) + # bpo-33725: running arbitrary code after fork() is no longer reliable + # on macOS since macOS 10.14 (Mojave). Use spawn by default instead. + # gh-84559: We changed everyones default to a thread safeish one in 3.14. + if reduction.HAVE_SEND_HANDLE and sys.platform != 'darwin': + _default_context = DefaultContext(_concrete_contexts['forkserver']) else: - _default_context = DefaultContext(_concrete_contexts['fork']) + _default_context = DefaultContext(_concrete_contexts['spawn']) -else: +else: # Windows class SpawnProcess(process.BaseProcess): _start_method = 'spawn' diff --git a/Lib/pdb.py b/Lib/pdb.py index 228de489a9cef1..aea6fb70ae3106 100644 --- a/Lib/pdb.py +++ b/Lib/pdb.py @@ -309,7 +309,7 @@ class Pdb(bdb.Bdb, cmd.Cmd): _last_pdb_instance = None def __init__(self, completekey='tab', stdin=None, stdout=None, skip=None, - nosigint=False, readrc=True): + nosigint=False, readrc=True, mode=None): bdb.Bdb.__init__(self, skip=skip) cmd.Cmd.__init__(self, completekey, stdin, stdout) sys.audit("pdb.Pdb") @@ -321,6 +321,7 @@ def __init__(self, completekey='tab', stdin=None, stdout=None, skip=None, self.mainpyfile = '' self._wait_for_mainpyfile = False self.tb_lineno = {} + self.mode = mode # Try to load readline if it exists try: import readline @@ -349,10 +350,6 @@ def __init__(self, completekey='tab', stdin=None, stdout=None, skip=None, pass self.commands = {} # associates a command list to breakpoint numbers - self.commands_doprompt = {} # for each bp num, tells if the prompt - # must be disp. after execing the cmd list - self.commands_silent = {} # for each bp num, tells if the stack trace - # must be disp. after execing the cmd list self.commands_defining = False # True while in the process of defining # a command list self.commands_bnum = None # The breakpoint number for which we are @@ -361,10 +358,14 @@ def __init__(self, completekey='tab', stdin=None, stdout=None, skip=None, self._chained_exceptions = tuple() self._chained_exception_index = 0 - def set_trace(self, frame=None): + def set_trace(self, frame=None, *, commands=None): Pdb._last_pdb_instance = self if frame is None: frame = sys._getframe().f_back + + if commands is not None: + self.rcLines.extend(commands) + super().set_trace(frame) def sigint_handler(self, signum, frame): @@ -398,13 +399,6 @@ def setup(self, f, tb): self.tb_lineno[tb.tb_frame] = lineno tb = tb.tb_next self.curframe = self.stack[self.curindex][0] - # The f_locals dictionary used to be updated from the actual frame - # locals whenever the .f_locals accessor was called, so it was - # cached here to ensure that modifications were not overwritten. While - # the caching is no longer required now that f_locals is a direct proxy - # on optimized frames, it's also harmless, so the code structure has - # been left unchanged. 
- self.curframe_locals = self.curframe.f_locals self.set_convenience_variable(self.curframe, '_frame', self.curframe) if self._chained_exceptions: @@ -439,8 +433,8 @@ def user_line(self, frame): or frame.f_lineno <= 0): return self._wait_for_mainpyfile = False - if self.bp_commands(frame): - self.interaction(frame, None) + self.bp_commands(frame) + self.interaction(frame, None) user_opcode = user_line @@ -455,18 +449,9 @@ def bp_commands(self, frame): self.currentbp in self.commands: currentbp = self.currentbp self.currentbp = 0 - lastcmd_back = self.lastcmd - self.setup(frame, None) for line in self.commands[currentbp]: - self.onecmd(line) - self.lastcmd = lastcmd_back - if not self.commands_silent[currentbp]: - self.print_stack_entry(self.stack[self.curindex]) - if self.commands_doprompt[currentbp]: - self._cmdloop() - self.forget() - return - return 1 + self.cmdqueue.append(line) + self.cmdqueue.append(f'_pdbcmd_restore_lastcmd {self.lastcmd}') def user_return(self, frame, return_value): """This function is called when a return trap is set here.""" @@ -727,7 +712,7 @@ def _exec_in_closure(self, source, globals, locals): def default(self, line): if line[:1] == '!': line = line[1:].strip() - locals = self.curframe_locals + locals = self.curframe.f_locals globals = self.curframe.f_globals try: buffer = line @@ -865,15 +850,15 @@ def handle_command_def(self, line): cmd, arg, line = self.parseline(line) if not cmd: return False - if cmd == 'silent': - self.commands_silent[self.commands_bnum] = True - return False # continue to handle other cmd def in the cmd list - elif cmd == 'end': + if cmd == 'end': return True # end of cmd list elif cmd == 'EOF': print('') return True # end of cmd list cmdlist = self.commands[self.commands_bnum] + if cmd == 'silent': + cmdlist.append('_pdbcmd_silence_frame_status') + return False # continue to handle other cmd def in the cmd list if arg: cmdlist.append(cmd+' '+arg) else: @@ -885,7 +870,6 @@ def handle_command_def(self, line): func = self.default # one of the resuming commands if func.__name__ in self.commands_resuming: - self.commands_doprompt[self.commands_bnum] = False return True return False @@ -955,7 +939,7 @@ def _complete_expression(self, text, line, begidx, endidx): # Collect globals and locals. It is usually not really sensible to also # complete builtins, and they clutter the namespace quite heavily, so we # leave them out. 
- ns = {**self.curframe.f_globals, **self.curframe_locals} + ns = {**self.curframe.f_globals, **self.curframe.f_locals} if text.startswith("$"): # Complete convenience variables conv_vars = self.curframe.f_globals.get('__pdb_convenience_variables', {}) @@ -986,7 +970,7 @@ def completedefault(self, text, line, begidx, endidx): # Use rlcompleter to do the completion state = 0 matches = [] - completer = Completer(self.curframe.f_globals | self.curframe_locals) + completer = Completer(self.curframe.f_globals | self.curframe.f_locals) while (match := completer.complete(text, state)) is not None: matches.append(match) state += 1 @@ -998,6 +982,13 @@ def _pdbcmd_print_frame_status(self, arg): self.print_stack_trace(0) self._show_display() + def _pdbcmd_silence_frame_status(self, arg): + if self.cmdqueue and self.cmdqueue[-1] == '_pdbcmd_print_frame_status': + self.cmdqueue.pop() + + def _pdbcmd_restore_lastcmd(self, arg): + self.lastcmd = arg + # Command definitions, called by cmdloop() # The argument is the remaining string on the command line # Return true to exit from the command loop @@ -1056,14 +1047,10 @@ def do_commands(self, arg): self.commands_bnum = bnum # Save old definitions for the case of a keyboard interrupt. if bnum in self.commands: - old_command_defs = (self.commands[bnum], - self.commands_doprompt[bnum], - self.commands_silent[bnum]) + old_commands = self.commands[bnum] else: - old_command_defs = None + old_commands = None self.commands[bnum] = [] - self.commands_doprompt[bnum] = True - self.commands_silent[bnum] = False prompt_back = self.prompt self.prompt = '(com) ' @@ -1072,14 +1059,10 @@ def do_commands(self, arg): self.cmdloop() except KeyboardInterrupt: # Restore old definitions. - if old_command_defs: - self.commands[bnum] = old_command_defs[0] - self.commands_doprompt[bnum] = old_command_defs[1] - self.commands_silent[bnum] = old_command_defs[2] + if old_commands: + self.commands[bnum] = old_commands else: del self.commands[bnum] - del self.commands_doprompt[bnum] - del self.commands_silent[bnum] self.error('command definition aborted, old commands restored') finally: self.commands_defining = False @@ -1148,7 +1131,7 @@ def do_break(self, arg, temporary = 0): try: func = eval(arg, self.curframe.f_globals, - self.curframe_locals) + self.curframe.f_locals) except: func = arg try: @@ -1453,7 +1436,6 @@ def _select_frame(self, number): assert 0 <= number < len(self.stack) self.curindex = number self.curframe = self.stack[self.curindex][0] - self.curframe_locals = self.curframe.f_locals self.set_convenience_variable(self.curframe, '_frame', self.curframe) self.print_stack_entry(self.stack[self.curindex]) self.lineno = None @@ -1607,6 +1589,11 @@ def do_run(self, arg): sys.argv. History, breakpoints, actions and debugger options are preserved. "restart" is an alias for "run". """ + if self.mode == 'inline': + self.error('run/restart command is disabled when pdb is running in inline mode.\n' + 'Use the command line interface to enable restarting your program\n' + 'e.g. 
"python -m pdb myscript.py"') + return if arg: import shlex argv0 = sys.argv[0:1] @@ -1694,7 +1681,7 @@ def do_debug(self, arg): """ sys.settrace(None) globals = self.curframe.f_globals - locals = self.curframe_locals + locals = self.curframe.f_locals p = Pdb(self.completekey, self.stdin, self.stdout) p.prompt = "(%s) " % self.prompt.strip() self.message("ENTERING RECURSIVE DEBUGGER") @@ -1739,7 +1726,7 @@ def do_args(self, arg): self._print_invalid_arg(arg) return co = self.curframe.f_code - dict = self.curframe_locals + dict = self.curframe.f_locals n = co.co_argcount + co.co_kwonlyargcount if co.co_flags & inspect.CO_VARARGS: n = n+1 if co.co_flags & inspect.CO_VARKEYWORDS: n = n+1 @@ -1759,15 +1746,15 @@ def do_retval(self, arg): if arg: self._print_invalid_arg(arg) return - if '__return__' in self.curframe_locals: - self.message(self._safe_repr(self.curframe_locals['__return__'], "retval")) + if '__return__' in self.curframe.f_locals: + self.message(self._safe_repr(self.curframe.f_locals['__return__'], "retval")) else: self.error('Not yet returned!') do_rv = do_retval def _getval(self, arg): try: - return eval(arg, self.curframe.f_globals, self.curframe_locals) + return eval(arg, self.curframe.f_globals, self.curframe.f_locals) except: self._error_exc() raise @@ -1775,7 +1762,7 @@ def _getval(self, arg): def _getval_except(self, arg, frame=None): try: if frame is None: - return eval(arg, self.curframe.f_globals, self.curframe_locals) + return eval(arg, self.curframe.f_globals, self.curframe.f_locals) else: return eval(arg, frame.f_globals, frame.f_locals) except BaseException as exc: @@ -2019,7 +2006,7 @@ def do_interact(self, arg): Start an interactive interpreter whose global namespace contains all the (global and local) names found in the current scope. """ - ns = {**self.curframe.f_globals, **self.curframe_locals} + ns = {**self.curframe.f_globals, **self.curframe.f_locals} console = _PdbInteractiveConsole(ns, message=self.message) console.interact(banner="*pdb interact start*", exitmsg="*exit from pdb interact command*") @@ -2091,7 +2078,7 @@ def complete_unalias(self, text, line, begidx, endidx): # List of all the commands making the program resume execution. commands_resuming = ['do_continue', 'do_step', 'do_next', 'do_return', - 'do_quit', 'do_jump'] + 'do_until', 'do_quit', 'do_jump'] # Print a traceback starting at the top stack frame. # The most recently entered frame is printed last; @@ -2350,21 +2337,22 @@ def runcall(*args, **kwds): """ return Pdb().runcall(*args, **kwds) -def set_trace(*, header=None): +def set_trace(*, header=None, commands=None): """Enter the debugger at the calling stack frame. This is useful to hard-code a breakpoint at a given point in a program, even if the code is not otherwise being debugged (e.g. when an assertion fails). If given, *header* is printed to the console - just before debugging begins. + just before debugging begins. *commands* is an optional list of + pdb commands to run when the debugger starts. """ if Pdb._last_pdb_instance is not None: pdb = Pdb._last_pdb_instance else: - pdb = Pdb() + pdb = Pdb(mode='inline') if header is not None: pdb.message(header) - pdb.set_trace(sys._getframe().f_back) + pdb.set_trace(sys._getframe().f_back, commands=commands) # Post-Mortem interface @@ -2476,7 +2464,7 @@ def main(): # modified by the script being debugged. It's a bad idea when it was # changed by the user from the command line. There is a "restart" command # which allows explicit specification of command line arguments. 
- pdb = Pdb() + pdb = Pdb(mode='cli') pdb.rcLines.extend(opts.commands) while True: try: diff --git a/Lib/pty.py b/Lib/pty.py index eb3d5f1ff657bb..4b25ac32c8da14 100644 --- a/Lib/pty.py +++ b/Lib/pty.py @@ -39,8 +39,8 @@ def openpty(): except ImportError: return master_fd, slave_fd try: - ioctl(result, I_PUSH, "ptem") - ioctl(result, I_PUSH, "ldterm") + ioctl(slave_fd, I_PUSH, "ptem") + ioctl(slave_fd, I_PUSH, "ldterm") except OSError: pass return master_fd, slave_fd diff --git a/Lib/pydoc.py b/Lib/pydoc.py index d376592d69d40d..eec7b0770f56ca 100644 --- a/Lib/pydoc.py +++ b/Lib/pydoc.py @@ -1870,6 +1870,7 @@ class Helper: ':': 'SLICINGS DICTIONARYLITERALS', '@': 'def class', '\\': 'STRINGS', + ':=': 'ASSIGNMENTEXPRESSIONS', '_': 'PRIVATENAMES', '__': 'PRIVATENAMES SPECIALMETHODS', '`': 'BACKQUOTES', @@ -1963,6 +1964,7 @@ class Helper: 'ASSERTION': 'assert', 'ASSIGNMENT': ('assignment', 'AUGMENTEDASSIGNMENT'), 'AUGMENTEDASSIGNMENT': ('augassign', 'NUMBERMETHODS'), + 'ASSIGNMENTEXPRESSIONS': ('assignment-expressions', ''), 'DELETION': 'del', 'RETURNING': 'return', 'IMPORTING': 'import', diff --git a/Lib/pydoc_data/topics.py b/Lib/pydoc_data/topics.py index 4643df80e44aaf..97bb4eb52f4386 100644 --- a/Lib/pydoc_data/topics.py +++ b/Lib/pydoc_data/topics.py @@ -416,6 +416,34 @@ 'some expressions (like un-parenthesized tuple expressions) ' 'caused a\n' 'syntax error.\n', + 'assignment-expressions': 'Assignment expressions\n' + '**********************\n' + '\n' + 'An assignment expression (sometimes also called a “named expression”' + '\nor “walrus”) assigns an expression to an identifier, while also\n' + 'returning the value of the expression.\n' + '\n' + 'One common use case is when handling matched regular expressions:\n' + '\n' + ' if matching := pattern.search(data):\n' + ' do_something(matching)\n' + '\n' + 'Or, when processing a file stream in chunks:\n' + '\n' + ' while chunk := file.read(9000):\n' + ' process(chunk)\n' + '\n' + 'Assignment expressions must be surrounded by parentheses when used as\n' + 'expression statements and when used as sub-expressions in slicing,\n' + 'conditional, lambda, keyword-argument, and comprehension-if\n' + 'expressions and in assert, with, and assignment statements. In all\n' + 'other places where they can be used, parentheses are not required,\n' + 'including in if and while statements.\n' + '\n' + 'Added in version 3.8.\n' + 'See also:\n' + '\n' + ' **PEP 572** - Assignment Expressions\n', 'async': 'Coroutines\n' '**********\n' '\n' diff --git a/Lib/shutil.py b/Lib/shutil.py index 89c12b76b61dfc..dab3ca5ee91245 100644 --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -48,7 +48,7 @@ # This should never be removed, see rationale in: # https://bugs.python.org/issue43743#msg393429 _USE_CP_SENDFILE = (hasattr(os, "sendfile") - and sys.platform.startswith(("linux", "android", "solaris"))) + and sys.platform.startswith(("linux", "android", "sunos"))) _HAS_FCOPYFILE = posix and hasattr(posix, "_fcopyfile") # macOS # CMD defaults in Windows 10 diff --git a/Lib/statistics.py b/Lib/statistics.py index d3dd0d530c31cf..f193fcdc241aa9 100644 --- a/Lib/statistics.py +++ b/Lib/statistics.py @@ -870,9 +870,12 @@ def f_inv(y): return f_inv def _quartic_invcdf_estimate(p): + # A handrolled piecewise approximation. There is no magic here. 
sign, p = (1.0, p) if p <= 1/2 else (-1.0, 1.0 - p) + if p < 0.0106: + return ((2.0 * p) ** 0.3838 - 1.0) * sign x = (2.0 * p) ** 0.4258865685331 - 1.0 - if p >= 0.004 < 0.499: + if p < 0.499: x += 0.026818732 * sin(7.101753784 * p + 2.73230839482953) return x * sign @@ -886,8 +889,11 @@ def quartic_kernel(): return pdf, cdf, invcdf, support def _triweight_invcdf_estimate(p): + # A handrolled piecewise approximation. There is no magic here. sign, p = (1.0, p) if p <= 1/2 else (-1.0, 1.0 - p) x = (2.0 * p) ** 0.3400218741872791 - 1.0 + if 0.00001 < p < 0.499: + x -= 0.033 * sin(1.07 * tau * (p - 0.035)) return x * sign @register('triweight') diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py index 4b3a0645cfc84a..a059a6b8340448 100644 --- a/Lib/test/_test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -5553,15 +5553,29 @@ def test_set_get(self): multiprocessing.set_start_method(old_method, force=True) self.assertGreaterEqual(count, 1) - def test_get_all(self): + def test_get_all_start_methods(self): methods = multiprocessing.get_all_start_methods() + self.assertIn('spawn', methods) if sys.platform == 'win32': self.assertEqual(methods, ['spawn']) + elif sys.platform == 'darwin': + self.assertEqual(methods[0], 'spawn') # The default is first. + # Whether these work or not, they remain available on macOS. + self.assertIn('fork', methods) + self.assertIn('forkserver', methods) else: - self.assertTrue(methods == ['fork', 'spawn'] or - methods == ['spawn', 'fork'] or - methods == ['fork', 'spawn', 'forkserver'] or - methods == ['spawn', 'fork', 'forkserver']) + # POSIX + self.assertIn('fork', methods) + if other_methods := set(methods) - {'fork', 'spawn'}: + # If there are more than those two, forkserver must be one. + self.assertEqual({'forkserver'}, other_methods) + # The default is the first method in the list. + self.assertIn(methods[0], {'forkserver', 'spawn'}, + msg='3.14+ default must not be fork') + if methods[0] == 'spawn': + # Confirm that the current default selection logic prefers + # forkserver vs spawn when available. 
+ self.assertNotIn('forkserver', methods) def test_preload_resources(self): if multiprocessing.get_start_method() != 'forkserver': diff --git a/Lib/test/certdata/keycert.pem.reference b/Lib/test/certdata/keycert.pem.reference new file mode 100644 index 00000000000000..f9a82f35f340dd --- /dev/null +++ b/Lib/test/certdata/keycert.pem.reference @@ -0,0 +1,13 @@ +{'issuer': ((('countryName', 'XY'),), + (('localityName', 'Castle Anthrax'),), + (('organizationName', 'Python Software Foundation'),), + (('commonName', 'localhost'),)), + 'notAfter': 'Jan 24 04:21:36 2043 GMT', + 'notBefore': 'Nov 25 04:21:36 2023 GMT', + 'serialNumber': '53E14833F7546C29256DD0F034F776C5E983004C', + 'subject': ((('countryName', 'XY'),), + (('localityName', 'Castle Anthrax'),), + (('organizationName', 'Python Software Foundation'),), + (('commonName', 'localhost'),)), + 'subjectAltName': (('DNS', 'localhost'),), + 'version': 3} diff --git a/Lib/test/certdata/keycert3.pem.reference b/Lib/test/certdata/keycert3.pem.reference new file mode 100644 index 00000000000000..04a749c920b38c --- /dev/null +++ b/Lib/test/certdata/keycert3.pem.reference @@ -0,0 +1,15 @@ +{'OCSP': ('http://testca.pythontest.net/testca/ocsp/',), + 'caIssuers': ('http://testca.pythontest.net/testca/pycacert.cer',), + 'crlDistributionPoints': ('http://testca.pythontest.net/testca/revocation.crl',), + 'issuer': ((('countryName', 'XY'),), + (('organizationName', 'Python Software Foundation CA'),), + (('commonName', 'our-ca-server'),)), + 'notAfter': 'Oct 28 14:23:16 2037 GMT', + 'notBefore': 'Aug 29 14:23:16 2018 GMT', + 'serialNumber': 'CB2D80995A69525C', + 'subject': ((('countryName', 'XY'),), + (('localityName', 'Castle Anthrax'),), + (('organizationName', 'Python Software Foundation'),), + (('commonName', 'localhost'),)), + 'subjectAltName': (('DNS', 'localhost'),), + 'version': 3} \ No newline at end of file diff --git a/Lib/test/certdata/make_ssl_certs.py b/Lib/test/certdata/make_ssl_certs.py index 6626b93976a585..5e626baf550c5b 100644 --- a/Lib/test/certdata/make_ssl_certs.py +++ b/Lib/test/certdata/make_ssl_certs.py @@ -1,6 +1,7 @@ """Make the custom certificate and private key files used by test_ssl and friends.""" +import argparse import os import pprint import shutil @@ -8,7 +9,8 @@ from subprocess import * startdate = "20180829142316Z" -enddate = "20371028142316Z" +enddate_default = "20371028142316Z" +days_default = "7000" req_template = """ [ default ] @@ -79,8 +81,8 @@ default_startdate = {startdate} enddate = {enddate} default_enddate = {enddate} - default_days = 7000 - default_crl_days = 7000 + default_days = {days} + default_crl_days = {days} certificate = pycacert.pem private_key = pycakey.pem serial = $dir/serial @@ -117,7 +119,7 @@ here = os.path.abspath(os.path.dirname(__file__)) -def make_cert_key(hostname, sign=False, extra_san='', +def make_cert_key(cmdlineargs, hostname, sign=False, extra_san='', ext='req_x509_extensions_full', key='rsa:3072'): print("creating cert for " + hostname) tempnames = [] @@ -130,11 +132,12 @@ def make_cert_key(hostname, sign=False, extra_san='', hostname=hostname, extra_san=extra_san, startdate=startdate, - enddate=enddate + enddate=cmdlineargs.enddate, + days=cmdlineargs.days ) with open(req_file, 'w') as f: f.write(req) - args = ['req', '-new', '-nodes', '-days', '7000', + args = ['req', '-new', '-nodes', '-days', cmdlineargs.days, '-newkey', key, '-keyout', key_file, '-extensions', ext, '-config', req_file] @@ -175,7 +178,7 @@ def make_cert_key(hostname, sign=False, extra_san='', def unmake_ca(): 
shutil.rmtree(TMP_CADIR) -def make_ca(): +def make_ca(cmdlineargs): os.mkdir(TMP_CADIR) with open(os.path.join('cadir','index.txt'),'a+') as f: pass # empty file @@ -192,7 +195,8 @@ def make_ca(): hostname='our-ca-server', extra_san='', startdate=startdate, - enddate=enddate + enddate=cmdlineargs.enddate, + days=cmdlineargs.days ) t.write(req) t.flush() @@ -219,14 +223,22 @@ def make_ca(): shutil.copy('capath/ceff1710.0', 'capath/b1930218.0') -def print_cert(path): +def write_cert_reference(path): import _ssl - pprint.pprint(_ssl._test_decode_cert(path)) + refdata = pprint.pformat(_ssl._test_decode_cert(path)) + print(refdata) + with open(path + '.reference', 'w') as f: + print(refdata, file=f) if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Make the custom certificate and private key files used by test_ssl and friends.') + parser.add_argument('--days', default=days_default) + parser.add_argument('--enddate', default=enddate_default) + cmdlineargs = parser.parse_args() + os.chdir(here) - cert, key = make_cert_key('localhost', ext='req_x509_extensions_simple') + cert, key = make_cert_key(cmdlineargs, 'localhost', ext='req_x509_extensions_simple') with open('ssl_cert.pem', 'w') as f: f.write(cert) with open('ssl_key.pem', 'w') as f: @@ -243,24 +255,24 @@ def print_cert(path): f.write(cert) # For certificate matching tests - make_ca() - cert, key = make_cert_key('fakehostname', ext='req_x509_extensions_simple') + make_ca(cmdlineargs) + cert, key = make_cert_key(cmdlineargs, 'fakehostname', ext='req_x509_extensions_simple') with open('keycert2.pem', 'w') as f: f.write(key) f.write(cert) - cert, key = make_cert_key('localhost', sign=True) + cert, key = make_cert_key(cmdlineargs, 'localhost', sign=True) with open('keycert3.pem', 'w') as f: f.write(key) f.write(cert) - cert, key = make_cert_key('fakehostname', sign=True) + cert, key = make_cert_key(cmdlineargs, 'fakehostname', sign=True) with open('keycert4.pem', 'w') as f: f.write(key) f.write(cert) cert, key = make_cert_key( - 'localhost-ecc', sign=True, key='param:secp384r1.pem' + cmdlineargs, 'localhost-ecc', sign=True, key='param:secp384r1.pem' ) with open('keycertecc.pem', 'w') as f: f.write(key) @@ -280,7 +292,7 @@ def print_cert(path): 'RID.1 = 1.2.3.4.5', ] - cert, key = make_cert_key('allsans', sign=True, extra_san='\n'.join(extra_san)) + cert, key = make_cert_key(cmdlineargs, 'allsans', sign=True, extra_san='\n'.join(extra_san)) with open('allsans.pem', 'w') as f: f.write(key) f.write(cert) @@ -297,17 +309,17 @@ def print_cert(path): ] # IDN SANS, signed - cert, key = make_cert_key('idnsans', sign=True, extra_san='\n'.join(extra_san)) + cert, key = make_cert_key(cmdlineargs, 'idnsans', sign=True, extra_san='\n'.join(extra_san)) with open('idnsans.pem', 'w') as f: f.write(key) f.write(cert) - cert, key = make_cert_key('nosan', sign=True, ext='req_x509_extensions_nosan') + cert, key = make_cert_key(cmdlineargs, 'nosan', sign=True, ext='req_x509_extensions_nosan') with open('nosan.pem', 'w') as f: f.write(key) f.write(cert) unmake_ca() - print("update Lib/test/test_ssl.py and Lib/test/test_asyncio/utils.py") - print_cert('keycert.pem') - print_cert('keycert3.pem') + print("Writing out reference data for Lib/test/test_ssl.py and Lib/test/test_asyncio/utils.py") + write_cert_reference('keycert.pem') + write_cert_reference('keycert3.pem') diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py index aef24e11393f6a..c81408b344968d 100644 --- a/Lib/test/datetimetester.py +++ b/Lib/test/datetimetester.py 
@@ -1106,6 +1106,85 @@ def test_delta_non_days_ignored(self): dt2 = dt - delta self.assertEqual(dt2, dt - days) + def test_strptime(self): + inputs = [ + # Basic valid cases + (date(1998, 2, 3), '1998-02-03', '%Y-%m-%d'), + (date(2004, 12, 2), '2004-12-02', '%Y-%m-%d'), + + # Edge cases: Leap year + (date(2020, 2, 29), '2020-02-29', '%Y-%m-%d'), # Valid leap year date + + # bpo-34482: Handle surrogate pairs + (date(2004, 12, 2), '2004-12\ud80002', '%Y-%m\ud800%d'), + (date(2004, 12, 2), '2004\ud80012-02', '%Y\ud800%m-%d'), + + # Month/day variations + (date(2004, 2, 1), '2004-02', '%Y-%m'), # No day provided + (date(2004, 2, 1), '02-2004', '%m-%Y'), # Month and year swapped + + # Different day-month-year formats + (date(2004, 12, 2), '02/12/2004', '%d/%m/%Y'), # Day/Month/Year + (date(2004, 12, 2), '12/02/2004', '%m/%d/%Y'), # Month/Day/Year + + # Different separators + (date(2023, 9, 24), '24.09.2023', '%d.%m.%Y'), # Dots as separators + (date(2023, 9, 24), '24-09-2023', '%d-%m-%Y'), # Dashes + (date(2023, 9, 24), '2023/09/24', '%Y/%m/%d'), # Slashes + + # Handling years with fewer digits + (date(127, 2, 3), '0127-02-03', '%Y-%m-%d'), + (date(99, 2, 3), '0099-02-03', '%Y-%m-%d'), + (date(5, 2, 3), '0005-02-03', '%Y-%m-%d'), + + # Variations on ISO 8601 format + (date(2023, 9, 25), '2023-W39-1', '%G-W%V-%u'), # ISO week date (Week 39, Monday) + (date(2023, 9, 25), '2023-268', '%Y-%j'), # Year and day of the year (Julian) + ] + for expected, string, format in inputs: + with self.subTest(string=string, format=format): + got = date.strptime(string, format) + self.assertEqual(expected, got) + self.assertIs(type(got), date) + + def test_strptime_single_digit(self): + # bpo-34903: Check that single digit dates are allowed. + strptime = date.strptime + with self.assertRaises(ValueError): + # %y does require two digits. + newdate = strptime('01/02/3', '%d/%m/%y') + + d1 = date(2003, 2, 1) + d2 = date(2003, 1, 2) + d3 = date(2003, 1, 25) + inputs = [ + ('%d', '1/02/03', '%d/%m/%y', d1), + ('%m', '01/2/03', '%d/%m/%y', d1), + ('%j', '2/03', '%j/%y', d2), + ('%w', '6/04/03', '%w/%U/%y', d1), + # %u requires a single digit. + ('%W', '6/4/2003', '%u/%W/%Y', d1), + ('%V', '6/4/2003', '%u/%V/%G', d3), + ] + for reason, string, format, target in inputs: + reason = 'test single digit ' + reason + with self.subTest(reason=reason, + string=string, + format=format, + target=target): + newdate = strptime(string, format) + self.assertEqual(newdate, target, msg=reason) + + @warnings_helper.ignore_warnings(category=DeprecationWarning) + def test_strptime_leap_year(self): + # GH-70647: warns if parsing a format with a day and no year. + with self.assertRaises(ValueError): + # The existing behavior that GH-70647 seeks to change. 
+ date.strptime('02-29', '%m-%d') + with self._assertNotWarns(DeprecationWarning): + date.strptime('20-03-14', '%y-%m-%d') + date.strptime('02-29,2024', '%m-%d,%Y') + class SubclassDate(date): sub_var = 1 @@ -2732,7 +2811,8 @@ def test_utcnow(self): def test_strptime(self): string = '2004-12-01 13:02:47.197' format = '%Y-%m-%d %H:%M:%S.%f' - expected = _strptime._strptime_datetime(self.theclass, string, format) + expected = _strptime._strptime_datetime_datetime(self.theclass, string, + format) got = self.theclass.strptime(string, format) self.assertEqual(expected, got) self.assertIs(type(expected), self.theclass) @@ -2746,8 +2826,8 @@ def test_strptime(self): ] for string, format in inputs: with self.subTest(string=string, format=format): - expected = _strptime._strptime_datetime(self.theclass, string, - format) + expected = _strptime._strptime_datetime_datetime(self.theclass, + string, format) got = self.theclass.strptime(string, format) self.assertEqual(expected, got) @@ -3342,6 +3422,9 @@ def test_fromisoformat_datetime_examples(self): ('2025-01-02T03:04:05,678+00:00:10', self.theclass(2025, 1, 2, 3, 4, 5, 678000, tzinfo=timezone(timedelta(seconds=10)))), + ('2025-01-02T24:00:00', self.theclass(2025, 1, 3, 0, 0, 0)), + ('2025-01-31T24:00:00', self.theclass(2025, 2, 1, 0, 0, 0)), + ('2025-12-31T24:00:00', self.theclass(2026, 1, 1, 0, 0, 0)) ] for input_str, expected in examples: @@ -3378,6 +3461,12 @@ def test_fromisoformat_fails_datetime(self): '2009-04-19T12:30:45.123456-05:00a', # Extra text '2009-04-19T12:30:45.123-05:00a', # Extra text '2009-04-19T12:30:45-05:00a', # Extra text + '2009-04-19T24:00:00.000001', # Has non-zero microseconds on 24:00 + '2009-04-19T24:00:01.000000', # Has non-zero seconds on 24:00 + '2009-04-19T24:01:00.000000', # Has non-zero minutes on 24:00 + '2009-04-32T24:00:00.000000', # Day is invalid before wrapping due to 24:00 + '2009-13-01T24:00:00.000000', # Month is invalid before wrapping due to 24:00 + '9999-12-31T24:00:00.000000', # Year is invalid after wrapping due to 24:00 ] for bad_str in bad_strs: @@ -3740,6 +3829,78 @@ def test_compat_unpickle(self): derived = loads(data, encoding='latin1') self.assertEqual(derived, expected) + def test_strptime(self): + # bpo-34482: Check that surrogates are handled properly. 
+ inputs = [ + (self.theclass(13, 2, 47, 197000), '13:02:47.197', '%H:%M:%S.%f'), + (self.theclass(13, 2, 47, 197000), '13:02\ud80047.197', '%H:%M\ud800%S.%f'), + (self.theclass(13, 2, 47, 197000), '13\ud80002:47.197', '%H\ud800%M:%S.%f'), + ] + for expected, string, format in inputs: + with self.subTest(string=string, format=format): + got = self.theclass.strptime(string, format) + self.assertEqual(expected, got) + self.assertIs(type(got), self.theclass) + + def test_strptime_tz(self): + strptime = self.theclass.strptime + self.assertEqual(strptime("+0002", "%z").utcoffset(), 2 * MINUTE) + self.assertEqual(strptime("-0002", "%z").utcoffset(), -2 * MINUTE) + self.assertEqual( + strptime("-00:02:01.000003", "%z").utcoffset(), + -timedelta(minutes=2, seconds=1, microseconds=3) + ) + # Only local timezone and UTC are supported + for tzseconds, tzname in ((0, 'UTC'), (0, 'GMT'), + (-_time.timezone, _time.tzname[0])): + if tzseconds < 0: + sign = '-' + seconds = -tzseconds + else: + sign ='+' + seconds = tzseconds + hours, minutes = divmod(seconds//60, 60) + tstr = "{}{:02d}{:02d} {}".format(sign, hours, minutes, tzname) + with self.subTest(tstr=tstr): + t = strptime(tstr, "%z %Z") + self.assertEqual(t.utcoffset(), timedelta(seconds=tzseconds)) + self.assertEqual(t.tzname(), tzname) + self.assertIs(type(t), self.theclass) + + # Can produce inconsistent time + tstr, fmt = "+1234 UTC", "%z %Z" + t = strptime(tstr, fmt) + self.assertEqual(t.utcoffset(), 12 * HOUR + 34 * MINUTE) + self.assertEqual(t.tzname(), 'UTC') + # yet will roundtrip + self.assertEqual(t.strftime(fmt), tstr) + + # Produce naive time if no %z is provided + self.assertEqual(strptime("UTC", "%Z").tzinfo, None) + + def test_strptime_errors(self): + for tzstr in ("-2400", "-000", "z"): + with self.assertRaises(ValueError): + self.theclass.strptime(tzstr, "%z") + + def test_strptime_single_digit(self): + # bpo-34903: Check that single digit times are allowed. + t = self.theclass(4, 5, 6) + inputs = [ + ('%H', '4:05:06', '%H:%M:%S', t), + ('%M', '04:5:06', '%H:%M:%S', t), + ('%S', '04:05:6', '%H:%M:%S', t), + ('%I', '4am:05:06', '%I%p:%M:%S', t), + ] + for reason, string, format, target in inputs: + reason = 'test single digit ' + reason + with self.subTest(reason=reason, + string=string, + format=format, + target=target): + newdate = self.theclass.strptime(string, format) + self.assertEqual(newdate, target, msg=reason) + def test_bool(self): # time is always True. 
cls = self.theclass @@ -4312,7 +4473,7 @@ def test_fromisoformat_timezone(self): with self.subTest(tstr=tstr): t_rt = self.theclass.fromisoformat(tstr) - assert t == t_rt, t_rt + assert t == t_rt def test_fromisoformat_timespecs(self): time_bases = [ diff --git a/Lib/test/libregrtest/utils.py b/Lib/test/libregrtest/utils.py index 7dcaf085a7ca91..d6be4ad049d14a 100644 --- a/Lib/test/libregrtest/utils.py +++ b/Lib/test/libregrtest/utils.py @@ -300,29 +300,78 @@ def get_build_info(): config_args = sysconfig.get_config_var('CONFIG_ARGS') or '' cflags = sysconfig.get_config_var('PY_CFLAGS') or '' - cflags_nodist = sysconfig.get_config_var('PY_CFLAGS_NODIST') or '' + cflags += ' ' + (sysconfig.get_config_var('PY_CFLAGS_NODIST') or '') ldflags_nodist = sysconfig.get_config_var('PY_LDFLAGS_NODIST') or '' build = [] # --disable-gil if sysconfig.get_config_var('Py_GIL_DISABLED'): - build.append("free_threading") + if not sys.flags.ignore_environment: + PYTHON_GIL = os.environ.get('PYTHON_GIL', None) + if PYTHON_GIL: + PYTHON_GIL = (PYTHON_GIL == '1') + else: + PYTHON_GIL = None + + free_threading = "free_threading" + if PYTHON_GIL is not None: + free_threading = f"{free_threading} GIL={int(PYTHON_GIL)}" + build.append(free_threading) if hasattr(sys, 'gettotalrefcount'): # --with-pydebug build.append('debug') - if '-DNDEBUG' in (cflags + cflags_nodist): + if '-DNDEBUG' in cflags: build.append('without_assert') else: build.append('release') if '--with-assertions' in config_args: build.append('with_assert') - elif '-DNDEBUG' not in (cflags + cflags_nodist): + elif '-DNDEBUG' not in cflags: build.append('with_assert') + # --enable-experimental-jit + tier2 = re.search('-D_Py_TIER2=([0-9]+)', cflags) + if tier2: + tier2 = int(tier2.group(1)) + + if not sys.flags.ignore_environment: + PYTHON_JIT = os.environ.get('PYTHON_JIT', None) + if PYTHON_JIT: + PYTHON_JIT = (PYTHON_JIT != '0') + else: + PYTHON_JIT = None + + if tier2 == 1: # =yes + if PYTHON_JIT == False: + jit = 'JIT=off' + else: + jit = 'JIT' + elif tier2 == 3: # =yes-off + if PYTHON_JIT: + jit = 'JIT' + else: + jit = 'JIT=off' + elif tier2 == 4: # =interpreter + if PYTHON_JIT == False: + jit = 'JIT-interpreter=off' + else: + jit = 'JIT-interpreter' + elif tier2 == 6: # =interpreter-off (Secret option!) + if PYTHON_JIT: + jit = 'JIT-interpreter' + else: + jit = 'JIT-interpreter=off' + elif '-D_Py_JIT' in cflags: + jit = 'JIT' + else: + jit = None + if jit: + build.append(jit) + # --enable-framework=name framework = sysconfig.get_config_var('PYTHONFRAMEWORK') if framework: diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index 628529b8664c77..99cb10fc7b5f7b 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -2209,7 +2209,15 @@ def skip_if_broken_multiprocessing_synchronize(): # bpo-38377: On Linux, creating a semaphore fails with OSError # if the current user does not have the permission to create # a file in /dev/shm/ directory. - synchronize.Lock(ctx=None) + import multiprocessing + synchronize.Lock(ctx=multiprocessing.get_context('fork')) + # The explicit fork mp context is required in order for + # TestResourceTracker.test_resource_tracker_reused to work. + # synchronize creates a new multiprocessing.resource_tracker + # process at module import time via the above call in that + # scenario. Awkward. This enables gh-84559. No code involved + # should have threads at that point so fork() should be safe. 
+ except OSError as exc: raise unittest.SkipTest(f"broken multiprocessing SemLock: {exc!r}") diff --git a/Lib/test/test__interpreters.py b/Lib/test/test__interpreters.py index f493a92e0ddce8..14cd50bd30502c 100644 --- a/Lib/test/test__interpreters.py +++ b/Lib/test/test__interpreters.py @@ -849,7 +849,6 @@ def test_execution_namespace_is_main(self): ns.pop('__loader__') self.assertEqual(ns, { '__name__': '__main__', - '__annotations__': {}, '__doc__': None, '__package__': None, '__spec__': None, diff --git a/Lib/test/test_annotationlib.py b/Lib/test/test_annotationlib.py index dd8ceb55a411fb..eedf2506a14912 100644 --- a/Lib/test/test_annotationlib.py +++ b/Lib/test/test_annotationlib.py @@ -1,12 +1,20 @@ """Tests for the annotations module.""" import annotationlib +import builtins import collections import functools import itertools import pickle import unittest -from annotationlib import Format, ForwardRef, get_annotations, get_annotate_function +from annotationlib import ( + Format, + ForwardRef, + get_annotations, + get_annotate_function, + annotations_to_string, + value_to_string, +) from typing import Unpack from test import support @@ -24,16 +32,21 @@ def wrapper(a, b): return wrapper +class MyClass: + def __repr__(self): + return "my repr" + + class TestFormat(unittest.TestCase): def test_enum(self): - self.assertEqual(annotationlib.Format.VALUE.value, 1) - self.assertEqual(annotationlib.Format.VALUE, 1) + self.assertEqual(Format.VALUE.value, 1) + self.assertEqual(Format.VALUE, 1) - self.assertEqual(annotationlib.Format.FORWARDREF.value, 2) - self.assertEqual(annotationlib.Format.FORWARDREF, 2) + self.assertEqual(Format.FORWARDREF.value, 2) + self.assertEqual(Format.FORWARDREF, 2) - self.assertEqual(annotationlib.Format.SOURCE.value, 3) - self.assertEqual(annotationlib.Format.SOURCE, 3) + self.assertEqual(Format.STRING.value, 3) + self.assertEqual(Format.STRING, 3) class TestForwardRefFormat(unittest.TestCase): @@ -41,9 +54,7 @@ def test_closure(self): def inner(arg: x): pass - anno = annotationlib.get_annotations( - inner, format=annotationlib.Format.FORWARDREF - ) + anno = annotationlib.get_annotations(inner, format=Format.FORWARDREF) fwdref = anno["arg"] self.assertIsInstance(fwdref, annotationlib.ForwardRef) self.assertEqual(fwdref.__forward_arg__, "x") @@ -53,16 +64,14 @@ def inner(arg: x): x = 1 self.assertEqual(fwdref.evaluate(), x) - anno = annotationlib.get_annotations( - inner, format=annotationlib.Format.FORWARDREF - ) + anno = annotationlib.get_annotations(inner, format=Format.FORWARDREF) self.assertEqual(anno["arg"], x) def test_function(self): def f(x: int, y: doesntexist): pass - anno = annotationlib.get_annotations(f, format=annotationlib.Format.FORWARDREF) + anno = annotationlib.get_annotations(f, format=Format.FORWARDREF) self.assertIs(anno["x"], int) fwdref = anno["y"] self.assertIsInstance(fwdref, annotationlib.ForwardRef) @@ -79,14 +88,14 @@ def test_closure(self): def inner(arg: x): pass - anno = annotationlib.get_annotations(inner, format=annotationlib.Format.SOURCE) + anno = annotationlib.get_annotations(inner, format=Format.STRING) self.assertEqual(anno, {"arg": "x"}) def test_function(self): def f(x: int, y: doesntexist): pass - anno = annotationlib.get_annotations(f, format=annotationlib.Format.SOURCE) + anno = annotationlib.get_annotations(f, format=Format.STRING) self.assertEqual(anno, {"x": "int", "y": "doesntexist"}) def test_expressions(self): @@ -120,7 +129,7 @@ def f( ): pass - anno = annotationlib.get_annotations(f, 
format=annotationlib.Format.SOURCE) + anno = annotationlib.get_annotations(f, format=Format.STRING) self.assertEqual( anno, { @@ -171,7 +180,7 @@ def f( ): pass - anno = annotationlib.get_annotations(f, format=annotationlib.Format.SOURCE) + anno = annotationlib.get_annotations(f, format=Format.STRING) self.assertEqual( anno, { @@ -205,7 +214,7 @@ def f( ): pass - anno = annotationlib.get_annotations(f, format=annotationlib.Format.SOURCE) + anno = annotationlib.get_annotations(f, format=Format.STRING) self.assertEqual( anno, { @@ -228,13 +237,13 @@ def f(fstring: f"{a}"): pass with self.assertRaisesRegex(TypeError, format_msg): - annotationlib.get_annotations(f, format=annotationlib.Format.SOURCE) + annotationlib.get_annotations(f, format=Format.STRING) def f(fstring_format: f"{a:02d}"): pass with self.assertRaisesRegex(TypeError, format_msg): - annotationlib.get_annotations(f, format=annotationlib.Format.SOURCE) + annotationlib.get_annotations(f, format=Format.STRING) class TestForwardRefClass(unittest.TestCase): @@ -263,7 +272,7 @@ class Gen[T]: with self.assertRaises(NameError): ForwardRef("T").evaluate(owner=int) - T, = Gen.__type_params__ + (T,) = Gen.__type_params__ self.assertIs(ForwardRef("T").evaluate(type_params=Gen.__type_params__), T) self.assertIs(ForwardRef("T").evaluate(owner=Gen), T) @@ -280,7 +289,13 @@ class Gen[T]: def test_fwdref_with_module(self): self.assertIs(ForwardRef("Format", module="annotationlib").evaluate(), Format) - self.assertIs(ForwardRef("Counter", module="collections").evaluate(), collections.Counter) + self.assertIs( + ForwardRef("Counter", module="collections").evaluate(), collections.Counter + ) + self.assertEqual( + ForwardRef("Counter[int]", module="collections").evaluate(), + collections.Counter[int], + ) with self.assertRaises(NameError): # If globals are passed explicitly, we don't look at the module dict @@ -305,6 +320,36 @@ def test_fwdref_value_is_cached(self): self.assertIs(fr.evaluate(globals={"hello": str}), str) self.assertIs(fr.evaluate(), str) + def test_fwdref_with_owner(self): + self.assertEqual( + ForwardRef("Counter[int]", owner=collections).evaluate(), + collections.Counter[int], + ) + + def test_name_lookup_without_eval(self): + # test the codepath where we look up simple names directly in the + # namespaces without going through eval() + self.assertIs(ForwardRef("int").evaluate(), int) + self.assertIs(ForwardRef("int").evaluate(locals={"int": str}), str) + self.assertIs( + ForwardRef("int").evaluate(locals={"int": float}, globals={"int": str}), + float, + ) + self.assertIs(ForwardRef("int").evaluate(globals={"int": str}), str) + with support.swap_attr(builtins, "int", dict): + self.assertIs(ForwardRef("int").evaluate(), dict) + + with self.assertRaises(NameError): + ForwardRef("doesntexist").evaluate() + + def test_fwdref_invalid_syntax(self): + fr = ForwardRef("if") + with self.assertRaises(SyntaxError): + fr.evaluate() + fr = ForwardRef("1+") + with self.assertRaises(SyntaxError): + fr.evaluate() + class TestGetAnnotations(unittest.TestCase): def test_builtin_type(self): @@ -333,22 +378,20 @@ class C1(metaclass=NoDict): self.assertEqual(annotationlib.get_annotations(C1), {"a": int}) self.assertEqual( - annotationlib.get_annotations(C1, format=annotationlib.Format.FORWARDREF), + annotationlib.get_annotations(C1, format=Format.FORWARDREF), {"a": int}, ) self.assertEqual( - annotationlib.get_annotations(C1, format=annotationlib.Format.SOURCE), + annotationlib.get_annotations(C1, format=Format.STRING), {"a": "int"}, ) 
self.assertEqual(annotationlib.get_annotations(NoDict), {"b": str}) self.assertEqual( - annotationlib.get_annotations( - NoDict, format=annotationlib.Format.FORWARDREF - ), + annotationlib.get_annotations(NoDict, format=Format.FORWARDREF), {"b": str}, ) self.assertEqual( - annotationlib.get_annotations(NoDict, format=annotationlib.Format.SOURCE), + annotationlib.get_annotations(NoDict, format=Format.STRING), {"b": "str"}, ) @@ -360,20 +403,20 @@ def f2(a: undefined): pass self.assertEqual( - annotationlib.get_annotations(f1, format=annotationlib.Format.VALUE), + annotationlib.get_annotations(f1, format=Format.VALUE), {"a": int}, ) self.assertEqual(annotationlib.get_annotations(f1, format=1), {"a": int}) fwd = annotationlib.ForwardRef("undefined") self.assertEqual( - annotationlib.get_annotations(f2, format=annotationlib.Format.FORWARDREF), + annotationlib.get_annotations(f2, format=Format.FORWARDREF), {"a": fwd}, ) self.assertEqual(annotationlib.get_annotations(f2, format=2), {"a": fwd}) self.assertEqual( - annotationlib.get_annotations(f1, format=annotationlib.Format.SOURCE), + annotationlib.get_annotations(f1, format=Format.STRING), {"a": "int"}, ) self.assertEqual(annotationlib.get_annotations(f1, format=3), {"a": "int"}) @@ -396,30 +439,26 @@ def foo(): pass with self.assertRaises(ValueError): - annotationlib.get_annotations( - foo, format=annotationlib.Format.FORWARDREF, eval_str=True - ) - annotationlib.get_annotations( - foo, format=annotationlib.Format.SOURCE, eval_str=True - ) + annotationlib.get_annotations(foo, format=Format.FORWARDREF, eval_str=True) + annotationlib.get_annotations(foo, format=Format.STRING, eval_str=True) def test_stock_annotations(self): def foo(a: int, b: str): pass - for format in (annotationlib.Format.VALUE, annotationlib.Format.FORWARDREF): + for format in (Format.VALUE, Format.FORWARDREF): with self.subTest(format=format): self.assertEqual( annotationlib.get_annotations(foo, format=format), {"a": int, "b": str}, ) self.assertEqual( - annotationlib.get_annotations(foo, format=annotationlib.Format.SOURCE), + annotationlib.get_annotations(foo, format=Format.STRING), {"a": "int", "b": "str"}, ) foo.__annotations__ = {"a": "foo", "b": "str"} - for format in annotationlib.Format: + for format in Format: with self.subTest(format=format): self.assertEqual( annotationlib.get_annotations(foo, format=format), @@ -441,10 +480,10 @@ def test_stock_annotations_in_module(self): for kwargs in [ {}, {"eval_str": False}, - {"format": annotationlib.Format.VALUE}, - {"format": annotationlib.Format.FORWARDREF}, - {"format": annotationlib.Format.VALUE, "eval_str": False}, - {"format": annotationlib.Format.FORWARDREF, "eval_str": False}, + {"format": Format.VALUE}, + {"format": Format.FORWARDREF}, + {"format": Format.VALUE, "eval_str": False}, + {"format": Format.FORWARDREF, "eval_str": False}, ]: with self.subTest(**kwargs): self.assertEqual( @@ -479,7 +518,7 @@ def test_stock_annotations_in_module(self): for kwargs in [ {"eval_str": True}, - {"format": annotationlib.Format.VALUE, "eval_str": True}, + {"format": Format.VALUE, "eval_str": True}, ]: with self.subTest(**kwargs): self.assertEqual( @@ -513,48 +552,36 @@ def test_stock_annotations_in_module(self): ) self.assertEqual( - annotationlib.get_annotations(isa, format=annotationlib.Format.SOURCE), + annotationlib.get_annotations(isa, format=Format.STRING), {"a": "int", "b": "str"}, ) self.assertEqual( - annotationlib.get_annotations( - isa.MyClass, format=annotationlib.Format.SOURCE - ), + 
annotationlib.get_annotations(isa.MyClass, format=Format.STRING), {"a": "int", "b": "str"}, ) self.assertEqual( - annotationlib.get_annotations( - isa.function, format=annotationlib.Format.SOURCE - ), + annotationlib.get_annotations(isa.function, format=Format.STRING), {"a": "int", "b": "str", "return": "MyClass"}, ) self.assertEqual( - annotationlib.get_annotations( - isa.function2, format=annotationlib.Format.SOURCE - ), + annotationlib.get_annotations(isa.function2, format=Format.STRING), {"a": "int", "b": "str", "c": "MyClass", "return": "MyClass"}, ) self.assertEqual( - annotationlib.get_annotations( - isa.function3, format=annotationlib.Format.SOURCE - ), + annotationlib.get_annotations(isa.function3, format=Format.STRING), {"a": "int", "b": "str", "c": "MyClass"}, ) self.assertEqual( - annotationlib.get_annotations( - annotationlib, format=annotationlib.Format.SOURCE - ), + annotationlib.get_annotations(annotationlib, format=Format.STRING), {}, ) self.assertEqual( - annotationlib.get_annotations( - isa.UnannotatedClass, format=annotationlib.Format.SOURCE - ), + annotationlib.get_annotations(isa.UnannotatedClass, format=Format.STRING), {}, ) self.assertEqual( annotationlib.get_annotations( - isa.unannotated_function, format=annotationlib.Format.SOURCE + isa.unannotated_function, format=Format.STRING ), {}, ) @@ -570,13 +597,11 @@ def test_stock_annotations_on_wrapper(self): {"a": int, "b": str, "return": isa.MyClass}, ) self.assertEqual( - annotationlib.get_annotations( - wrapped, format=annotationlib.Format.FORWARDREF - ), + annotationlib.get_annotations(wrapped, format=Format.FORWARDREF), {"a": int, "b": str, "return": isa.MyClass}, ) self.assertEqual( - annotationlib.get_annotations(wrapped, format=annotationlib.Format.SOURCE), + annotationlib.get_annotations(wrapped, format=Format.STRING), {"a": "int", "b": "str", "return": "MyClass"}, ) self.assertEqual( @@ -593,12 +618,12 @@ def test_stringized_annotations_in_module(self): for kwargs in [ {}, {"eval_str": False}, - {"format": annotationlib.Format.VALUE}, - {"format": annotationlib.Format.FORWARDREF}, - {"format": annotationlib.Format.SOURCE}, - {"format": annotationlib.Format.VALUE, "eval_str": False}, - {"format": annotationlib.Format.FORWARDREF, "eval_str": False}, - {"format": annotationlib.Format.SOURCE, "eval_str": False}, + {"format": Format.VALUE}, + {"format": Format.FORWARDREF}, + {"format": Format.STRING}, + {"format": Format.VALUE, "eval_str": False}, + {"format": Format.FORWARDREF, "eval_str": False}, + {"format": Format.STRING, "eval_str": False}, ]: with self.subTest(**kwargs): self.assertEqual( @@ -631,7 +656,7 @@ def test_stringized_annotations_in_module(self): for kwargs in [ {"eval_str": True}, - {"format": annotationlib.Format.VALUE, "eval_str": True}, + {"format": Format.VALUE, "eval_str": True}, ]: with self.subTest(**kwargs): self.assertEqual( @@ -705,17 +730,96 @@ def f(x: int): self.assertEqual(annotationlib.get_annotations(f), {"x": int}) self.assertEqual( - annotationlib.get_annotations(f, format=annotationlib.Format.FORWARDREF), + annotationlib.get_annotations(f, format=Format.FORWARDREF), {"x": int}, ) f.__annotations__["x"] = str # The modification is reflected in VALUE (the default) self.assertEqual(annotationlib.get_annotations(f), {"x": str}) - # ... but not in FORWARDREF, which uses __annotate__ + # ... 
and also in FORWARDREF, which tries __annotations__ if available self.assertEqual( - annotationlib.get_annotations(f, format=annotationlib.Format.FORWARDREF), - {"x": int}, + annotationlib.get_annotations(f, format=Format.FORWARDREF), + {"x": str}, + ) + # ... but not in STRING which always uses __annotate__ + self.assertEqual( + annotationlib.get_annotations(f, format=Format.STRING), + {"x": "int"}, + ) + + def test_non_dict_annotations(self): + class WeirdAnnotations: + @property + def __annotations__(self): + return "not a dict" + + wa = WeirdAnnotations() + for format in Format: + with ( + self.subTest(format=format), + self.assertRaisesRegex( + ValueError, r".*__annotations__ is neither a dict nor None" + ), + ): + annotationlib.get_annotations(wa, format=format) + + def test_annotations_on_custom_object(self): + class HasAnnotations: + @property + def __annotations__(self): + return {"x": int} + + ha = HasAnnotations() + self.assertEqual( + annotationlib.get_annotations(ha, format=Format.VALUE), {"x": int} + ) + self.assertEqual( + annotationlib.get_annotations(ha, format=Format.FORWARDREF), {"x": int} + ) + + self.assertEqual( + annotationlib.get_annotations(ha, format=Format.STRING), {"x": "int"} + ) + + def test_raising_annotations_on_custom_object(self): + class HasRaisingAnnotations: + @property + def __annotations__(self): + return {"x": undefined} + + hra = HasRaisingAnnotations() + + with self.assertRaises(NameError): + annotationlib.get_annotations(hra, format=Format.VALUE) + + with self.assertRaises(NameError): + annotationlib.get_annotations(hra, format=Format.FORWARDREF) + + undefined = float + self.assertEqual( + annotationlib.get_annotations(hra, format=Format.VALUE), {"x": float} + ) + + def test_forwardref_prefers_annotations(self): + class HasBoth: + @property + def __annotations__(self): + return {"x": int} + + @property + def __annotate__(self): + return lambda format: {"x": str} + + hb = HasBoth() + self.assertEqual( + annotationlib.get_annotations(hb, format=Format.VALUE), {"x": int} + ) + self.assertEqual( + annotationlib.get_annotations(hb, format=Format.FORWARDREF), {"x": int} + ) + self.assertEqual( + annotationlib.get_annotations(hb, format=Format.STRING), {"x": str} ) def test_pep695_generic_class_with_future_annotations(self): @@ -845,15 +949,13 @@ def evaluate(format, exc=NotImplementedError): return undefined with self.assertRaises(NameError): - annotationlib.call_evaluate_function(evaluate, annotationlib.Format.VALUE) + annotationlib.call_evaluate_function(evaluate, Format.VALUE) self.assertEqual( - annotationlib.call_evaluate_function( - evaluate, annotationlib.Format.FORWARDREF - ), + annotationlib.call_evaluate_function(evaluate, Format.FORWARDREF), annotationlib.ForwardRef("undefined"), ) self.assertEqual( - annotationlib.call_evaluate_function(evaluate, annotationlib.Format.SOURCE), + annotationlib.call_evaluate_function(evaluate, Format.STRING), "undefined", ) @@ -963,6 +1065,29 @@ class C: self.assertEqual(get_annotate_function(C)(Format.VALUE), {"a": int}) +class TestToSource(unittest.TestCase): + def test_value_to_string(self): + self.assertEqual(value_to_string(int), "int") + self.assertEqual(value_to_string(MyClass), "test.test_annotationlib.MyClass") + self.assertEqual(value_to_string(len), "len") + self.assertEqual(value_to_string(value_to_string), "value_to_string") + self.assertEqual(value_to_string(times_three), "times_three") + self.assertEqual(value_to_string(...), "...") + self.assertEqual(value_to_string(None), "None") + 
self.assertEqual(value_to_string(1), "1") + self.assertEqual(value_to_string("1"), "'1'") + self.assertEqual(value_to_string(Format.VALUE), repr(Format.VALUE)) + self.assertEqual(value_to_string(MyClass()), "my repr") + + def test_annotations_to_string(self): + self.assertEqual(annotations_to_string({}), {}) + self.assertEqual(annotations_to_string({"x": int}), {"x": "int"}) + self.assertEqual(annotations_to_string({"x": "int"}), {"x": "int"}) + self.assertEqual( + annotations_to_string({"x": int, "y": str}), {"x": "int", "y": "str"} + ) + + class TestAnnotationLib(unittest.TestCase): def test__all__(self): support.check__all__(self, annotationlib) diff --git a/Lib/test/test_argparse.py b/Lib/test/test_argparse.py index ef05a6fefcffcc..a972ed0cc9053b 100644 --- a/Lib/test/test_argparse.py +++ b/Lib/test/test_argparse.py @@ -380,15 +380,22 @@ class TestOptionalsSingleDashAmbiguous(ParserTestCase): """Test Optionals that partially match but are not subsets""" argument_signatures = [Sig('-foobar'), Sig('-foorab')] - failures = ['-f', '-f a', '-fa', '-foa', '-foo', '-fo', '-foo b'] + failures = ['-f', '-f a', '-fa', '-foa', '-foo', '-fo', '-foo b', + '-f=a', '-foo=b'] successes = [ ('', NS(foobar=None, foorab=None)), ('-foob a', NS(foobar='a', foorab=None)), + ('-foob=a', NS(foobar='a', foorab=None)), ('-foor a', NS(foobar=None, foorab='a')), + ('-foor=a', NS(foobar=None, foorab='a')), ('-fooba a', NS(foobar='a', foorab=None)), + ('-fooba=a', NS(foobar='a', foorab=None)), ('-foora a', NS(foobar=None, foorab='a')), + ('-foora=a', NS(foobar=None, foorab='a')), ('-foobar a', NS(foobar='a', foorab=None)), + ('-foobar=a', NS(foobar='a', foorab=None)), ('-foorab a', NS(foobar=None, foorab='a')), + ('-foorab=a', NS(foobar=None, foorab='a')), ] @@ -621,9 +628,9 @@ class TestOptionalsNargsOptional(ParserTestCase): Sig('-w', nargs='?'), Sig('-x', nargs='?', const=42), Sig('-y', nargs='?', default='spam'), - Sig('-z', nargs='?', type=int, const='42', default='84'), + Sig('-z', nargs='?', type=int, const='42', default='84', choices=[1, 2]), ] - failures = ['2'] + failures = ['2', '-z a', '-z 42', '-z 84'] successes = [ ('', NS(w=None, x=None, y='spam', z=84)), ('-w', NS(w=None, x=None, y='spam', z=84)), @@ -679,7 +686,7 @@ class TestOptionalsChoices(ParserTestCase): argument_signatures = [ Sig('-f', choices='abc'), Sig('-g', type=int, choices=range(5))] - failures = ['a', '-f d', '-fad', '-ga', '-g 6'] + failures = ['a', '-f d', '-f ab', '-fad', '-ga', '-g 6'] successes = [ ('', NS(f=None, g=None)), ('-f a', NS(f='a', g=None)), @@ -875,7 +882,9 @@ class TestOptionalsAllowLongAbbreviation(ParserTestCase): successes = [ ('', NS(foo=None, foobaz=None, fooble=False)), ('--foo 7', NS(foo='7', foobaz=None, fooble=False)), + ('--foo=7', NS(foo='7', foobaz=None, fooble=False)), ('--fooba a', NS(foo=None, foobaz='a', fooble=False)), + ('--fooba=a', NS(foo=None, foobaz='a', fooble=False)), ('--foobl --foo g', NS(foo='g', foobaz=None, fooble=True)), ] @@ -914,6 +923,23 @@ class TestOptionalsDisallowLongAbbreviationPrefixChars(ParserTestCase): ] +class TestOptionalsDisallowSingleDashLongAbbreviation(ParserTestCase): + """Do not allow abbreviations of long options at all""" + + parser_signature = Sig(allow_abbrev=False) + argument_signatures = [ + Sig('-foo'), + Sig('-foodle', action='store_true'), + Sig('-foonly'), + ] + failures = ['-foon 3', '-food', '-food -foo 2'] + successes = [ + ('', NS(foo=None, foodle=False, foonly=None)), + ('-foo 3', NS(foo='3', foodle=False, foonly=None)), + ('-foonly 7 -foodle -foo 2', 
NS(foo='2', foodle=True, foonly='7')), + ] + + class TestDisallowLongAbbreviationAllowsShortGrouping(ParserTestCase): """Do not allow abbreviations of long options at all""" @@ -1001,8 +1027,8 @@ class TestPositionalsNargsZeroOrMore(ParserTestCase): class TestPositionalsNargsZeroOrMoreDefault(ParserTestCase): """Test a Positional that specifies unlimited nargs and a default""" - argument_signatures = [Sig('foo', nargs='*', default='bar')] - failures = ['-x'] + argument_signatures = [Sig('foo', nargs='*', default='bar', choices=['a', 'b'])] + failures = ['-x', 'bar', 'a c'] successes = [ ('', NS(foo='bar')), ('a', NS(foo=['a'])), @@ -1035,8 +1061,8 @@ class TestPositionalsNargsOptional(ParserTestCase): class TestPositionalsNargsOptionalDefault(ParserTestCase): """Tests an Optional Positional with a default value""" - argument_signatures = [Sig('foo', nargs='?', default=42)] - failures = ['-x', 'a b'] + argument_signatures = [Sig('foo', nargs='?', default=42, choices=['a', 'b'])] + failures = ['-x', 'a b', '42'] successes = [ ('', NS(foo=42)), ('a', NS(foo='a')), @@ -1049,9 +1075,9 @@ class TestPositionalsNargsOptionalConvertedDefault(ParserTestCase): """ argument_signatures = [ - Sig('foo', nargs='?', type=int, default='42'), + Sig('foo', nargs='?', type=int, default='42', choices=[1, 2]), ] - failures = ['-x', 'a b', '1 2'] + failures = ['-x', 'a b', '1 2', '42'] successes = [ ('', NS(foo=42)), ('1', NS(foo=1)), @@ -1570,18 +1596,24 @@ class TestDefaultSuppress(ParserTestCase): """Test actions with suppressed defaults""" argument_signatures = [ - Sig('foo', nargs='?', default=argparse.SUPPRESS), - Sig('bar', nargs='*', default=argparse.SUPPRESS), + Sig('foo', nargs='?', type=int, default=argparse.SUPPRESS), + Sig('bar', nargs='*', type=int, default=argparse.SUPPRESS), Sig('--baz', action='store_true', default=argparse.SUPPRESS), + Sig('--qux', nargs='?', type=int, default=argparse.SUPPRESS), + Sig('--quux', nargs='*', type=int, default=argparse.SUPPRESS), ] - failures = ['-x'] + failures = ['-x', 'a', '1 a'] successes = [ ('', NS()), - ('a', NS(foo='a')), - ('a b', NS(foo='a', bar=['b'])), + ('1', NS(foo=1)), + ('1 2', NS(foo=1, bar=[2])), ('--baz', NS(baz=True)), - ('a --baz', NS(foo='a', baz=True)), - ('--baz a b', NS(foo='a', bar=['b'], baz=True)), + ('1 --baz', NS(foo=1, baz=True)), + ('--baz 1 2', NS(foo=1, bar=[2], baz=True)), + ('--qux', NS(qux=None)), + ('--qux 1', NS(qux=1)), + ('--quux', NS(quux=[])), + ('--quux 1 2', NS(quux=[1, 2])), ] @@ -2238,14 +2270,14 @@ def _get_parser(self, subparser_help=False, prefix_chars=None, parser1_kwargs['aliases'] = ['1alias1', '1alias2'] parser1 = subparsers.add_parser('1', **parser1_kwargs) parser1.add_argument('-w', type=int, help='w help') - parser1.add_argument('x', choices='abc', help='x help') + parser1.add_argument('x', choices=['a', 'b', 'c'], help='x help') # add second sub-parser parser2_kwargs = dict(description='2 description') if subparser_help: parser2_kwargs['help'] = '2 help' parser2 = subparsers.add_parser('2', **parser2_kwargs) - parser2.add_argument('-y', choices='123', help='y help') + parser2.add_argument('-y', choices=['1', '2', '3'], help='y help') parser2.add_argument('z', type=complex, nargs='*', help='z help') # add third sub-parser @@ -2312,6 +2344,40 @@ def test_parse_known_args(self): (NS(foo=False, bar=0.5, w=7, x='b'), ['-W', '-X', 'Y', 'Z']), ) + def test_parse_known_args_to_class_namespace(self): + class C: + pass + self.assertEqual( + self.parser.parse_known_args('0.5 1 b -w 7 -p'.split(), namespace=C), + (C, 
['-p']), + ) + self.assertIs(C.foo, False) + self.assertEqual(C.bar, 0.5) + self.assertEqual(C.w, 7) + self.assertEqual(C.x, 'b') + + def test_abbreviation(self): + parser = ErrorRaisingArgumentParser() + parser.add_argument('--foodle') + parser.add_argument('--foonly') + subparsers = parser.add_subparsers() + parser1 = subparsers.add_parser('bar') + parser1.add_argument('--fo') + parser1.add_argument('--foonew') + + self.assertEqual(parser.parse_args(['--food', 'baz', 'bar']), + NS(foodle='baz', foonly=None, fo=None, foonew=None)) + self.assertEqual(parser.parse_args(['--foon', 'baz', 'bar']), + NS(foodle=None, foonly='baz', fo=None, foonew=None)) + self.assertArgumentParserError(parser.parse_args, ['--fo', 'baz', 'bar']) + self.assertEqual(parser.parse_args(['bar', '--fo', 'baz']), + NS(foodle=None, foonly=None, fo='baz', foonew=None)) + self.assertEqual(parser.parse_args(['bar', '--foo', 'baz']), + NS(foodle=None, foonly=None, fo=None, foonew='baz')) + self.assertEqual(parser.parse_args(['bar', '--foon', 'baz']), + NS(foodle=None, foonly=None, fo=None, foonew='baz')) + self.assertArgumentParserError(parser.parse_args, ['bar', '--food', 'baz']) + def test_parse_known_args_with_single_dash_option(self): parser = ErrorRaisingArgumentParser() parser.add_argument('-k', '--known', action='count', default=0) @@ -4586,7 +4652,7 @@ class TestHelpVariableExpansion(HelpTestCase): help='x %(prog)s %(default)s %(type)s %%'), Sig('-y', action='store_const', default=42, const='XXX', help='y %(prog)s %(default)s %(const)s'), - Sig('--foo', choices='abc', + Sig('--foo', choices=['a', 'b', 'c'], help='foo %(prog)s %(default)s %(choices)s'), Sig('--bar', default='baz', choices=[1, 2], metavar='BBB', help='bar %(prog)s %(default)s %(dest)s'), @@ -5249,7 +5315,7 @@ def test_no_argument_actions(self): for action in ['store_const', 'store_true', 'store_false', 'append_const', 'count']: for attrs in [dict(type=int), dict(nargs='+'), - dict(choices='ab')]: + dict(choices=['a', 'b'])]: self.assertTypeError('-x', action=action, **attrs) def test_no_argument_no_const_actions(self): diff --git a/Lib/test/test_asyncio/test_eager_task_factory.py b/Lib/test/test_asyncio/test_eager_task_factory.py index 0777f39b572486..1579ad1188d725 100644 --- a/Lib/test/test_asyncio/test_eager_task_factory.py +++ b/Lib/test/test_asyncio/test_eager_task_factory.py @@ -213,6 +213,53 @@ async def run(): self.run_coro(run()) + def test_staggered_race_with_eager_tasks(self): + # See https://github.com/python/cpython/issues/124309 + + async def fail(): + await asyncio.sleep(0) + raise ValueError("no good") + + async def run(): + winner, index, excs = await asyncio.staggered.staggered_race( + [ + lambda: asyncio.sleep(2, result="sleep2"), + lambda: asyncio.sleep(1, result="sleep1"), + lambda: fail() + ], + delay=0.25 + ) + self.assertEqual(winner, 'sleep1') + self.assertEqual(index, 1) + self.assertIsNone(excs[index]) + self.assertIsInstance(excs[0], asyncio.CancelledError) + self.assertIsInstance(excs[2], ValueError) + + self.run_coro(run()) + + def test_staggered_race_with_eager_tasks_no_delay(self): + # See https://github.com/python/cpython/issues/124309 + async def fail(): + raise ValueError("no good") + + async def run(): + winner, index, excs = await asyncio.staggered.staggered_race( + [ + lambda: fail(), + lambda: asyncio.sleep(1, result="sleep1"), + lambda: asyncio.sleep(0, result="sleep0"), + ], + delay=None + ) + self.assertEqual(winner, 'sleep1') + self.assertEqual(index, 1) + self.assertIsNone(excs[index]) + 
self.assertIsInstance(excs[0], ValueError) + self.assertEqual(len(excs), 2) + + self.run_coro(run()) + + class PyEagerTaskFactoryLoopTests(EagerTaskFactoryLoopTests, test_utils.TestCase): Task = tasks._PyTask diff --git a/Lib/test/test_asyncio/test_runners.py b/Lib/test/test_asyncio/test_runners.py index 266f057f0776c3..45f70d09a2083a 100644 --- a/Lib/test/test_asyncio/test_runners.py +++ b/Lib/test/test_asyncio/test_runners.py @@ -93,8 +93,8 @@ async def main(): def test_asyncio_run_only_coro(self): for o in {1, lambda: None}: with self.subTest(obj=o), \ - self.assertRaisesRegex(ValueError, - 'a coroutine was expected'): + self.assertRaisesRegex(TypeError, + 'an awaitable is required'): asyncio.run(o) def test_asyncio_run_debug(self): @@ -319,19 +319,28 @@ async def f(): def test_run_non_coro(self): with asyncio.Runner() as runner: with self.assertRaisesRegex( - ValueError, - "a coroutine was expected" + TypeError, + "an awaitable is required" ): runner.run(123) def test_run_future(self): with asyncio.Runner() as runner: - with self.assertRaisesRegex( - ValueError, - "a coroutine was expected" - ): - fut = runner.get_loop().create_future() - runner.run(fut) + fut = runner.get_loop().create_future() + fut.set_result('done') + self.assertEqual('done', runner.run(fut)) + + def test_run_awaitable(self): + class MyAwaitable: + def __await__(self): + return self.run().__await__() + + @staticmethod + async def run(): + return 'done' + + with asyncio.Runner() as runner: + self.assertEqual('done', runner.run(MyAwaitable())) def test_explicit_close(self): runner = asyncio.Runner() diff --git a/Lib/test/test_asyncio/test_staggered.py b/Lib/test/test_asyncio/test_staggered.py index e6e32f7dbbbcba..8cd98394aea8f8 100644 --- a/Lib/test/test_asyncio/test_staggered.py +++ b/Lib/test/test_asyncio/test_staggered.py @@ -82,16 +82,64 @@ async def test_none_successful(self): async def coro(index): raise ValueError(index) + for delay in [None, 0, 0.1, 1]: + with self.subTest(delay=delay): + winner, index, excs = await staggered_race( + [ + lambda: coro(0), + lambda: coro(1), + ], + delay=delay, + ) + + self.assertIs(winner, None) + self.assertIs(index, None) + self.assertEqual(len(excs), 2) + self.assertIsInstance(excs[0], ValueError) + self.assertIsInstance(excs[1], ValueError) + + async def test_long_delay_early_failure(self): + async def coro(index): + await asyncio.sleep(0) # Dummy coroutine for the 1 case + if index == 0: + await asyncio.sleep(0.1) # Dummy coroutine + raise ValueError(index) + + return f'Res: {index}' + winner, index, excs = await staggered_race( [ lambda: coro(0), lambda: coro(1), ], - delay=None, + delay=10, ) - self.assertIs(winner, None) - self.assertIs(index, None) + self.assertEqual(winner, 'Res: 1') + self.assertEqual(index, 1) self.assertEqual(len(excs), 2) self.assertIsInstance(excs[0], ValueError) - self.assertIsInstance(excs[1], ValueError) + self.assertIsNone(excs[1]) + + def test_loop_argument(self): + loop = asyncio.new_event_loop() + async def coro(): + self.assertEqual(loop, asyncio.get_running_loop()) + return 'coro' + + async def main(): + winner, index, excs = await staggered_race( + [coro], + delay=0.1, + loop=loop + ) + + self.assertEqual(winner, 'coro') + self.assertEqual(index, 0) + + loop.run_until_complete(main()) + loop.close() + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_asyncio/utils.py b/Lib/test/test_asyncio/utils.py index 35893ab3118e1e..b8dbe7feaac3f4 100644 --- a/Lib/test/test_asyncio/utils.py +++ 
b/Lib/test/test_asyncio/utils.py @@ -15,6 +15,7 @@ import unittest import weakref import warnings +from ast import literal_eval from unittest import mock from http.server import HTTPServer @@ -56,24 +57,8 @@ def data_file(*filename): ONLYKEY = data_file('certdata', 'ssl_key.pem') SIGNED_CERTFILE = data_file('certdata', 'keycert3.pem') SIGNING_CA = data_file('certdata', 'pycacert.pem') -PEERCERT = { - 'OCSP': ('http://testca.pythontest.net/testca/ocsp/',), - 'caIssuers': ('http://testca.pythontest.net/testca/pycacert.cer',), - 'crlDistributionPoints': ('http://testca.pythontest.net/testca/revocation.crl',), - 'issuer': ((('countryName', 'XY'),), - (('organizationName', 'Python Software Foundation CA'),), - (('commonName', 'our-ca-server'),)), - 'notAfter': 'Oct 28 14:23:16 2037 GMT', - 'notBefore': 'Aug 29 14:23:16 2018 GMT', - 'serialNumber': 'CB2D80995A69525C', - 'subject': ((('countryName', 'XY'),), - (('localityName', 'Castle Anthrax'),), - (('organizationName', 'Python Software Foundation'),), - (('commonName', 'localhost'),)), - 'subjectAltName': (('DNS', 'localhost'),), - 'version': 3 -} - +with open(data_file('certdata', 'keycert3.pem.reference')) as file: + PEERCERT = literal_eval(file.read()) def simple_server_sslcontext(): server_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py index 2ea97e797a4892..d884f54940b471 100644 --- a/Lib/test/test_builtin.py +++ b/Lib/test/test_builtin.py @@ -2607,6 +2607,7 @@ def test_new_type(self): self.assertEqual(A.__module__, __name__) self.assertEqual(A.__bases__, (object,)) self.assertIs(A.__base__, object) + self.assertNotIn('__firstlineno__', A.__dict__) x = A() self.assertIs(type(x), A) self.assertIs(x.__class__, A) @@ -2685,6 +2686,17 @@ def test_type_qualname(self): A.__qualname__ = b'B' self.assertEqual(A.__qualname__, 'D.E') + def test_type_firstlineno(self): + A = type('A', (), {'__firstlineno__': 42}) + self.assertEqual(A.__name__, 'A') + self.assertEqual(A.__module__, __name__) + self.assertEqual(A.__dict__['__firstlineno__'], 42) + A.__module__ = 'testmodule' + self.assertEqual(A.__module__, 'testmodule') + self.assertNotIn('__firstlineno__', A.__dict__) + A.__firstlineno__ = 43 + self.assertEqual(A.__dict__['__firstlineno__'], 43) + def test_type_typeparams(self): class A[T]: pass diff --git a/Lib/test/test_capi/test_codecs.py b/Lib/test/test_capi/test_codecs.py index bd521a509d07ec..85491a89947318 100644 --- a/Lib/test/test_capi/test_codecs.py +++ b/Lib/test/test_capi/test_codecs.py @@ -1,13 +1,20 @@ -import unittest +import codecs +import contextlib +import io +import re import sys +import unittest +import unittest.mock as mock +import _testcapi from test.support import import_helper _testlimitedcapi = import_helper.import_module('_testlimitedcapi') NULL = None +BAD_ARGUMENT = re.escape('bad argument type for built-in operation') -class CAPITest(unittest.TestCase): +class CAPIUnicodeTest(unittest.TestCase): # TODO: Test the following functions: # # PyUnicode_BuildEncodingMap @@ -516,5 +523,291 @@ def test_asrawunicodeescapestring(self): # CRASHES asrawunicodeescapestring(NULL) +class CAPICodecs(unittest.TestCase): + + def setUp(self): + # Encoding names are normalized internally by converting them + # to lowercase and their hyphens are replaced by underscores. 
+ self.encoding_name = 'test.test_capi.test_codecs.codec_reversed' + # Make sure that our custom codec is not already registered (that + # way we know whether we correctly unregistered the custom codec + # after a test or not). + self.assertRaises(LookupError, codecs.lookup, self.encoding_name) + # create the search function without registering yet + self._create_custom_codec() + + def _create_custom_codec(self): + def codec_encoder(m, errors='strict'): + return (type(m)().join(reversed(m)), len(m)) + + def codec_decoder(c, errors='strict'): + return (type(c)().join(reversed(c)), len(c)) + + class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input, final=False): + return codec_encoder(input) + + class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input, final=False): + return codec_decoder(input) + + class StreamReader(codecs.StreamReader): + def encode(self, input, errors='strict'): + return codec_encoder(input, errors=errors) + + def decode(self, input, errors='strict'): + return codec_decoder(input, errors=errors) + + class StreamWriter(codecs.StreamWriter): + def encode(self, input, errors='strict'): + return codec_encoder(input, errors=errors) + + def decode(self, input, errors='strict'): + return codec_decoder(input, errors=errors) + + info = codecs.CodecInfo( + encode=codec_encoder, + decode=codec_decoder, + streamreader=StreamReader, + streamwriter=StreamWriter, + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + name=self.encoding_name + ) + + def search_function(encoding): + if encoding == self.encoding_name: + return info + return None + + self.codec_info = info + self.search_function = search_function + + @contextlib.contextmanager + def use_custom_encoder(self): + self.assertRaises(LookupError, codecs.lookup, self.encoding_name) + codecs.register(self.search_function) + yield + codecs.unregister(self.search_function) + self.assertRaises(LookupError, codecs.lookup, self.encoding_name) + + def test_codec_register(self): + search_function, encoding = self.search_function, self.encoding_name + # register the search function using the C API + self.assertIsNone(_testcapi.codec_register(search_function)) + # in case the test failed before cleaning up + self.addCleanup(codecs.unregister, self.search_function) + self.assertIs(codecs.lookup(encoding), search_function(encoding)) + self.assertEqual(codecs.encode('123', encoding=encoding), '321') + # unregister the search function using the regular API + codecs.unregister(search_function) + self.assertRaises(LookupError, codecs.lookup, encoding) + + def test_codec_unregister(self): + search_function, encoding = self.search_function, self.encoding_name + self.assertRaises(LookupError, codecs.lookup, encoding) + # register the search function using the regular API + codecs.register(search_function) + # in case the test failed before cleaning up + self.addCleanup(codecs.unregister, self.search_function) + self.assertIsNotNone(codecs.lookup(encoding)) + # unregister the search function using the C API + self.assertIsNone(_testcapi.codec_unregister(search_function)) + self.assertRaises(LookupError, codecs.lookup, encoding) + + def test_codec_known_encoding(self): + self.assertRaises(LookupError, codecs.lookup, 'unknown-codec') + self.assertFalse(_testcapi.codec_known_encoding('unknown-codec')) + self.assertFalse(_testcapi.codec_known_encoding('unknown_codec')) + self.assertFalse(_testcapi.codec_known_encoding('UNKNOWN-codec')) + + encoding_name = self.encoding_name + 
self.assertRaises(LookupError, codecs.lookup, encoding_name) + + codecs.register(self.search_function) + self.addCleanup(codecs.unregister, self.search_function) + + for name in [ + encoding_name, + encoding_name.upper(), + encoding_name.replace('_', '-'), + ]: + with self.subTest(name): + self.assertTrue(_testcapi.codec_known_encoding(name)) + + def test_codec_encode(self): + encode = _testcapi.codec_encode + self.assertEqual(encode('a', 'utf-8', NULL), b'a') + self.assertEqual(encode('a', 'utf-8', 'strict'), b'a') + self.assertEqual(encode('[é]', 'ascii', 'ignore'), b'[]') + + self.assertRaises(TypeError, encode, NULL, 'ascii', 'strict') + with self.assertRaisesRegex(TypeError, BAD_ARGUMENT): + encode('a', NULL, 'strict') + + def test_codec_decode(self): + decode = _testcapi.codec_decode + + s = 'a\xa1\u4f60\U0001f600' + b = s.encode() + + self.assertEqual(decode(b, 'utf-8', 'strict'), s) + self.assertEqual(decode(b, 'utf-8', NULL), s) + self.assertEqual(decode(b, 'latin1', 'strict'), b.decode('latin1')) + self.assertRaises(UnicodeDecodeError, decode, b, 'ascii', 'strict') + self.assertRaises(UnicodeDecodeError, decode, b, 'ascii', NULL) + self.assertEqual(decode(b, 'ascii', 'replace'), 'a' + '\ufffd'*9) + + # _codecs.decode() only reports an unknown error handling name when + # the corresponding error handling function is used; this differs + # from PyUnicode_Decode() which checks that both the encoding and + # the error handling name are recognized before even attempting to + # call the decoder. + self.assertEqual(decode(b'', 'utf-8', 'unknown-error-handler'), '') + self.assertEqual(decode(b'a', 'utf-8', 'unknown-error-handler'), 'a') + + self.assertRaises(TypeError, decode, NULL, 'ascii', 'strict') + with self.assertRaisesRegex(TypeError, BAD_ARGUMENT): + decode(b, NULL, 'strict') + + def test_codec_encoder(self): + codec_encoder = _testcapi.codec_encoder + + with self.use_custom_encoder(): + encoder = codec_encoder(self.encoding_name) + self.assertIs(encoder, self.codec_info.encode) + + with self.assertRaisesRegex(TypeError, BAD_ARGUMENT): + codec_encoder(NULL) + + def test_codec_decoder(self): + codec_decoder = _testcapi.codec_decoder + + with self.use_custom_encoder(): + decoder = codec_decoder(self.encoding_name) + self.assertIs(decoder, self.codec_info.decode) + + with self.assertRaisesRegex(TypeError, BAD_ARGUMENT): + codec_decoder(NULL) + + def test_codec_incremental_encoder(self): + codec_incremental_encoder = _testcapi.codec_incremental_encoder + + with self.use_custom_encoder(): + encoding = self.encoding_name + + for errors in ['strict', NULL]: + with self.subTest(errors): + encoder = codec_incremental_encoder(encoding, errors) + self.assertIsInstance(encoder, self.codec_info.incrementalencoder) + + with self.assertRaisesRegex(TypeError, BAD_ARGUMENT): + codec_incremental_encoder(NULL, 'strict') + + def test_codec_incremental_decoder(self): + codec_incremental_decoder = _testcapi.codec_incremental_decoder + + with self.use_custom_encoder(): + encoding = self.encoding_name + + for errors in ['strict', NULL]: + with self.subTest(errors): + decoder = codec_incremental_decoder(encoding, errors) + self.assertIsInstance(decoder, self.codec_info.incrementaldecoder) + + with self.assertRaisesRegex(TypeError, BAD_ARGUMENT): + codec_incremental_decoder(NULL, 'strict') + + def test_codec_stream_reader(self): + codec_stream_reader = _testcapi.codec_stream_reader + + with self.use_custom_encoder(): + encoding, stream = self.encoding_name, io.StringIO() + for errors in ['strict', NULL]: + 
with self.subTest(errors): + writer = codec_stream_reader(encoding, stream, errors) + self.assertIsInstance(writer, self.codec_info.streamreader) + + with self.assertRaisesRegex(TypeError, BAD_ARGUMENT): + codec_stream_reader(NULL, stream, 'strict') + + def test_codec_stream_writer(self): + codec_stream_writer = _testcapi.codec_stream_writer + + with self.use_custom_encoder(): + encoding, stream = self.encoding_name, io.StringIO() + for errors in ['strict', NULL]: + with self.subTest(errors): + writer = codec_stream_writer(encoding, stream, errors) + self.assertIsInstance(writer, self.codec_info.streamwriter) + + with self.assertRaisesRegex(TypeError, BAD_ARGUMENT): + codec_stream_writer(NULL, stream, 'strict') + + +class CAPICodecErrors(unittest.TestCase): + + def test_codec_register_error(self): + # for cleaning up between tests + from _codecs import _unregister_error as _codecs_unregister_error + + self.assertRaises(LookupError, _testcapi.codec_lookup_error, 'custom') + + def custom_error_handler(exc): + raise exc + + error_handler = mock.Mock(wraps=custom_error_handler) + _testcapi.codec_register_error('custom', error_handler) + self.addCleanup(_codecs_unregister_error, 'custom') + + self.assertRaises(UnicodeEncodeError, codecs.encode, + '\xff', 'ascii', errors='custom') + error_handler.assert_called_once() + error_handler.reset_mock() + + self.assertRaises(UnicodeDecodeError, codecs.decode, + b'\xff', 'ascii', errors='custom') + error_handler.assert_called_once() + + # _codecs._unregister_error directly delegates to the internal C + # function so a Python-level function test is sufficient (it is + # tested in test_codeccallbacks). + + def test_codec_lookup_error(self): + codec_lookup_error = _testcapi.codec_lookup_error + self.assertIs(codec_lookup_error(NULL), codecs.strict_errors) + self.assertIs(codec_lookup_error('strict'), codecs.strict_errors) + self.assertIs(codec_lookup_error('ignore'), codecs.ignore_errors) + self.assertIs(codec_lookup_error('replace'), codecs.replace_errors) + self.assertIs(codec_lookup_error('xmlcharrefreplace'), codecs.xmlcharrefreplace_errors) + self.assertIs(codec_lookup_error('namereplace'), codecs.namereplace_errors) + self.assertRaises(LookupError, codec_lookup_error, 'unknown') + + def test_codec_error_handlers(self): + exceptions = [ + # A UnicodeError with an empty message currently crashes: + # See: https://github.com/python/cpython/issues/123378 + # UnicodeEncodeError('bad', '', 0, 1, 'reason'), + UnicodeEncodeError('bad', 'x', 0, 1, 'reason'), + UnicodeEncodeError('bad', 'xyz123', 0, 1, 'reason'), + UnicodeEncodeError('bad', 'xyz123', 1, 4, 'reason'), + ] + + strict_handler = _testcapi.codec_strict_errors + for exc in exceptions: + with self.subTest(handler=strict_handler, exc=exc): + self.assertRaises(UnicodeEncodeError, strict_handler, exc) + + for handler in [ + _testcapi.codec_ignore_errors, + _testcapi.codec_replace_errors, + _testcapi.codec_xmlcharrefreplace_errors, + _testlimitedcapi.codec_namereplace_errors, + ]: + for exc in exceptions: + with self.subTest(handler=handler, exc=exc): + self.assertIsInstance(handler(exc), tuple) + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_capi/test_config.py b/Lib/test/test_capi/test_config.py index 01637e1cb7b6e5..71fb9ae45c7c30 100644 --- a/Lib/test/test_capi/test_config.py +++ b/Lib/test/test_capi/test_config.py @@ -68,7 +68,7 @@ def test_config_get(self): ("parser_debug", bool, None), ("parse_argv", bool, None), ("pathconfig_warnings", bool, None), - ("perf_profiling", 
bool, None), + ("perf_profiling", int, None), ("platlibdir", str, "platlibdir"), ("prefix", str | None, "prefix"), ("program_name", str, None), diff --git a/Lib/test/test_class.py b/Lib/test/test_class.py index 902f788edc22f0..b88c4d16ba4ef4 100644 --- a/Lib/test/test_class.py +++ b/Lib/test/test_class.py @@ -1,6 +1,7 @@ "Test the functionality of Python classes implementing operators." import unittest +from test.support import cpython_only, import_helper, script_helper testmeths = [ @@ -932,6 +933,36 @@ class C: C.a = X() C.a = X() + @cpython_only + def test_detach_materialized_dict_no_memory(self): + # Skip test if _testcapi is not available: + import_helper.import_module('_testcapi') + + code = """if 1: + import test.support + import _testcapi + + class A: + def __init__(self): + self.a = 1 + self.b = 2 + a = A() + d = a.__dict__ + with test.support.catch_unraisable_exception() as ex: + _testcapi.set_nomemory(0, 1) + del a + assert ex.unraisable.exc_type is MemoryError + try: + d["a"] + except KeyError: + pass + else: + assert False, "KeyError not raised" + """ + rc, out, err = script_helper.assert_python_ok("-c", code) + self.assertEqual(rc, 0) + self.assertFalse(out, msg=out.decode('utf-8')) + self.assertFalse(err, msg=err.decode('utf-8')) if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_codeccallbacks.py b/Lib/test/test_codeccallbacks.py index 4991330489d139..86e5e5c1474674 100644 --- a/Lib/test/test_codeccallbacks.py +++ b/Lib/test/test_codeccallbacks.py @@ -1,3 +1,4 @@ +from _codecs import _unregister_error as _codecs_unregister_error import codecs import html.entities import itertools @@ -1210,7 +1211,6 @@ def replace_with_long(exc): '\ufffd\x00\x00' ) - def test_fake_error_class(self): handlers = [ codecs.strict_errors, @@ -1235,6 +1235,31 @@ class FakeUnicodeError(Exception): with self.assertRaises((TypeError, FakeUnicodeError)): handler(FakeUnicodeError()) + def test_reject_unregister_builtin_error_handler(self): + for name in [ + 'strict', 'ignore', 'replace', 'backslashreplace', 'namereplace', + 'xmlcharrefreplace', 'surrogateescape', 'surrogatepass', + ]: + with self.subTest(name): + self.assertRaises(ValueError, _codecs_unregister_error, name) + + def test_unregister_custom_error_handler(self): + def custom_handler(exc): + raise exc + + custom_name = 'test.test_unregister_custom_error_handler' + self.assertRaises(LookupError, codecs.lookup_error, custom_name) + codecs.register_error(custom_name, custom_handler) + self.assertIs(codecs.lookup_error(custom_name), custom_handler) + self.assertTrue(_codecs_unregister_error(custom_name)) + self.assertRaises(LookupError, codecs.lookup_error, custom_name) + + def test_unregister_custom_unknown_error_handler(self): + unknown_name = 'test.test_unregister_custom_unknown_error_handler' + self.assertRaises(LookupError, codecs.lookup_error, unknown_name) + self.assertFalse(_codecs_unregister_error(unknown_name)) + self.assertRaises(LookupError, codecs.lookup_error, unknown_name) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_compile.py b/Lib/test/test_compile.py index 736eff35c1d5f2..f7ef7a1c26f7bd 100644 --- a/Lib/test/test_compile.py +++ b/Lib/test/test_compile.py @@ -1,6 +1,7 @@ import contextlib import dis import io +import itertools import math import opcode import os @@ -1527,6 +1528,45 @@ async def name_4(): pass [[]] +class TestBooleanExpression(unittest.TestCase): + class Value: + def __init__(self): + self.called = 0 + + def __bool__(self): + self.called += 1 + return self.value + + 
class Yes(Value): + value = True + + class No(Value): + value = False + + def test_short_circuit_and(self): + v = [self.Yes(), self.No(), self.Yes()] + res = v[0] and v[1] and v[0] + self.assertIs(res, v[1]) + self.assertEqual([e.called for e in v], [1, 1, 0]) + + def test_short_circuit_or(self): + v = [self.No(), self.Yes(), self.No()] + res = v[0] or v[1] or v[0] + self.assertIs(res, v[1]) + self.assertEqual([e.called for e in v], [1, 1, 0]) + + def test_compound(self): + # See gh-124285 + v = [self.No(), self.Yes(), self.Yes(), self.Yes()] + res = v[0] and v[1] or v[2] or v[3] + self.assertIs(res, v[2]) + self.assertEqual([e.called for e in v], [1, 0, 1, 0]) + + v = [self.No(), self.No(), self.Yes(), self.Yes(), self.No()] + res = v[0] or v[1] and v[2] or v[3] or v[4] + self.assertIs(res, v[3]) + self.assertEqual([e.called for e in v], [1, 1, 0, 1, 0]) + @requires_debug_ranges() class TestSourcePositions(unittest.TestCase): # Ensure that compiled code snippets have correct line and column numbers @@ -2648,6 +2688,22 @@ def test_nested(self): self.compare_instructions(seq, [('LOAD_CONST', 1, 1, 0, 0, 0)]) self.compare_instructions(seq.get_nested()[0], [('LOAD_CONST', 2, 2, 0, 0, 0)]) + def test_static_attributes_are_sorted(self): + code = ( + 'class T:\n' + ' def __init__(self):\n' + ' self.{V1} = 10\n' + ' self.{V2} = 10\n' + ' def foo(self):\n' + ' self.{V3} = 10\n' + ) + attributes = ("a", "b", "c") + for perm in itertools.permutations(attributes): + var_names = {f'V{i + 1}': name for i, name in enumerate(perm)} + ns = run_code(code.format(**var_names)) + t = ns['T'] + self.assertEqual(t.__static_attributes__, attributes) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_dataclasses/__init__.py b/Lib/test/test_dataclasses/__init__.py index 6934e88d9d338c..2984f4261bd2c4 100644 --- a/Lib/test/test_dataclasses/__init__.py +++ b/Lib/test/test_dataclasses/__init__.py @@ -17,7 +17,8 @@ from typing import ClassVar, Any, List, Union, Tuple, Dict, Generic, TypeVar, Optional, Protocol, DefaultDict from typing import get_type_hints from collections import deque, OrderedDict, namedtuple, defaultdict -from functools import total_ordering +from copy import deepcopy +from functools import total_ordering, wraps import typing # Needed for the string "typing.ClassVar[int]" to work as an annotation. import dataclasses # Needed for the string "dataclasses.InitVar[int]" to work as an annotation. 
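The test_dataclasses hunks that follow exercise the new doc parameter of dataclasses.field(), which now shows up in Field's repr and, for slots=True classes, becomes the per-slot docstring stored in the generated __slots__ mapping. A minimal sketch of the behavior under test, assuming an interpreter that already carries this change; the Point class and its field names are purely illustrative:

from dataclasses import dataclass, field

@dataclass(slots=True)
class Point:
    # Illustrative only; mirrors TestSlots.test_slots_with_docs below.
    x: int = field(doc='x coordinate')  # doc becomes the slot's docstring
    y: int = 0                          # no doc -> None in the __slots__ mapping

print(Point.__slots__)  # expected: {'x': 'x coordinate', 'y': None}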
@@ -60,7 +61,7 @@ class C: x: int = field(default=1, default_factory=int) def test_field_repr(self): - int_field = field(default=1, init=True, repr=False) + int_field = field(default=1, init=True, repr=False, doc='Docstring') int_field.name = "id" repr_output = repr(int_field) expected_output = "Field(name='id',type=None," \ @@ -68,6 +69,7 @@ def test_field_repr(self): "init=True,repr=False,hash=None," \ "compare=True,metadata=mappingproxy({})," \ f"kw_only={MISSING!r}," \ + "doc='Docstring'," \ "_field_type=None)" self.assertEqual(repr_output, expected_output) @@ -3175,6 +3177,48 @@ class C: with self.assertRaisesRegex(TypeError, 'unhashable type'): hash(C({})) + def test_frozen_deepcopy_without_slots(self): + # see: https://github.com/python/cpython/issues/89683 + @dataclass(frozen=True, slots=False) + class C: + s: str + + c = C('hello') + self.assertEqual(deepcopy(c), c) + + def test_frozen_deepcopy_with_slots(self): + # see: https://github.com/python/cpython/issues/89683 + with self.subTest('generated __slots__'): + @dataclass(frozen=True, slots=True) + class C: + s: str + + c = C('hello') + self.assertEqual(deepcopy(c), c) + + with self.subTest('user-defined __slots__ and no __{get,set}state__'): + @dataclass(frozen=True, slots=False) + class C: + __slots__ = ('s',) + s: str + + # with user-defined slots, __getstate__ and __setstate__ are not + # automatically added, hence the error + err = r"^cannot\ assign\ to\ field\ 's'$" + self.assertRaisesRegex(FrozenInstanceError, err, deepcopy, C('')) + + with self.subTest('user-defined __slots__ and __{get,set}state__'): + @dataclass(frozen=True, slots=False) + class C: + __slots__ = ('s',) + __getstate__ = dataclasses._dataclass_getstate + __setstate__ = dataclasses._dataclass_setstate + + s: str + + c = C('hello') + self.assertEqual(deepcopy(c), c) + class TestSlots(unittest.TestCase): def test_simple(self): @@ -3261,7 +3305,7 @@ class Base(Root4): j: str h: str - self.assertEqual(Base.__slots__, ('y', )) + self.assertEqual(Base.__slots__, ('y',)) @dataclass(slots=True) class Derived(Base): @@ -3271,7 +3315,7 @@ class Derived(Base): k: str h: str - self.assertEqual(Derived.__slots__, ('z', )) + self.assertEqual(Derived.__slots__, ('z',)) @dataclass class AnotherDerived(Base): @@ -3279,6 +3323,24 @@ class AnotherDerived(Base): self.assertNotIn('__slots__', AnotherDerived.__dict__) + def test_slots_with_docs(self): + class Root: + __slots__ = {'x': 'x'} + + @dataclass(slots=True) + class Base(Root): + y1: int = field(doc='y1') + y2: int + + self.assertEqual(Base.__slots__, {'y1': 'y1', 'y2': None}) + + @dataclass(slots=True) + class Child(Base): + z1: int = field(doc='z1') + z2: int + + self.assertEqual(Child.__slots__, {'z1': 'z1', 'z2': None}) + def test_cant_inherit_from_iterator_slots(self): class Root: @@ -4869,5 +4931,129 @@ class A: self.assertEqual(fs[0].name, 'x') +class TestZeroArgumentSuperWithSlots(unittest.TestCase): + def test_zero_argument_super(self): + @dataclass(slots=True) + class A: + def foo(self): + super() + + A().foo() + + def test_dunder_class_with_old_property(self): + @dataclass(slots=True) + class A: + def _get_foo(slf): + self.assertIs(__class__, type(slf)) + self.assertIs(__class__, slf.__class__) + return __class__ + + def _set_foo(slf, value): + self.assertIs(__class__, type(slf)) + self.assertIs(__class__, slf.__class__) + + def _del_foo(slf): + self.assertIs(__class__, type(slf)) + self.assertIs(__class__, slf.__class__) + + foo = property(_get_foo, _set_foo, _del_foo) + + a = A() + self.assertIs(a.foo, A) + 
a.foo = 4 + del a.foo + + def test_dunder_class_with_new_property(self): + @dataclass(slots=True) + class A: + @property + def foo(slf): + return slf.__class__ + + @foo.setter + def foo(slf, value): + self.assertIs(__class__, type(slf)) + + @foo.deleter + def foo(slf): + self.assertIs(__class__, type(slf)) + + a = A() + self.assertIs(a.foo, A) + a.foo = 4 + del a.foo + + # Test the parts of a property individually. + def test_slots_dunder_class_property_getter(self): + @dataclass(slots=True) + class A: + @property + def foo(slf): + return __class__ + + a = A() + self.assertIs(a.foo, A) + + def test_slots_dunder_class_property_setter(self): + @dataclass(slots=True) + class A: + foo = property() + @foo.setter + def foo(slf, val): + self.assertIs(__class__, type(slf)) + + a = A() + a.foo = 4 + + def test_slots_dunder_class_property_deleter(self): + @dataclass(slots=True) + class A: + foo = property() + @foo.deleter + def foo(slf): + self.assertIs(__class__, type(slf)) + + a = A() + del a.foo + + def test_wrapped(self): + def mydecorator(f): + @wraps(f) + def wrapper(*args, **kwargs): + return f(*args, **kwargs) + return wrapper + + @dataclass(slots=True) + class A: + @mydecorator + def foo(self): + super() + + A().foo() + + def test_remembered_class(self): + # Apply the dataclass decorator manually (not when the class + # is created), so that we can keep a reference to the + # undecorated class. + class A: + def cls(self): + return __class__ + + self.assertIs(A().cls(), A) + + B = dataclass(slots=True)(A) + self.assertIs(B().cls(), B) + + # This is undesirable behavior, but is a function of how + # modifying __class__ in the closure works. I'm not sure this + # should be tested or not: I don't really want to guarantee + # this behavior, but I don't want to lose the point that this + # is how it works. + + # The underlying class is "broken" by changing its __class__ + # in A.foo() to B. This normally isn't a problem, because no + # one will be keeping a reference to the underlying class A. 
+ self.assertIs(A().cls(), B) + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_decimal.py b/Lib/test/test_decimal.py index 12479e32d0f5db..c591fd54430b18 100644 --- a/Lib/test/test_decimal.py +++ b/Lib/test/test_decimal.py @@ -4381,7 +4381,8 @@ def test_module_attributes(self): self.assertEqual(C.__version__, P.__version__) - self.assertEqual(dir(C), dir(P)) + self.assertLessEqual(set(dir(C)), set(dir(P))) + self.assertEqual([n for n in dir(C) if n[:2] != '__'], sorted(P.__all__)) def test_context_attributes(self): diff --git a/Lib/test/test_dis.py b/Lib/test/test_dis.py index bccd2182412577..1ee0fbe98914be 100644 --- a/Lib/test/test_dis.py +++ b/Lib/test/test_dis.py @@ -380,6 +380,23 @@ def wrap_func_w_kwargs(): RETURN_CONST 3 (None) """ +fn_with_annotate_str = """ +def foo(a: int, b: str) -> str: + return a * b +""" + +dis_fn_with_annotate_str = """\ + 0 RESUME 0 + + 2 LOAD_CONST 0 (", line 2>) + MAKE_FUNCTION + LOAD_CONST 1 (", line 2>) + MAKE_FUNCTION + SET_FUNCTION_ATTRIBUTE 16 (annotate) + STORE_NAME 0 (foo) + RETURN_CONST 2 (None) +""" + compound_stmt_str = """\ x = 0 while 1: @@ -1098,6 +1115,7 @@ def test_disassemble_str(self): self.do_disassembly_test(expr_str, dis_expr_str) self.do_disassembly_test(simple_stmt_str, dis_simple_stmt_str) self.do_disassembly_test(annot_stmt_str, dis_annot_stmt_str) + self.do_disassembly_test(fn_with_annotate_str, dis_fn_with_annotate_str) self.do_disassembly_test(compound_stmt_str, dis_compound_stmt_str) def test_disassemble_bytes(self): diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py index 7c5cb855a397ab..3edc19d8254754 100644 --- a/Lib/test/test_embed.py +++ b/Lib/test/test_embed.py @@ -560,7 +560,7 @@ class InitConfigTests(EmbeddingTestsMixin, unittest.TestCase): 'cpu_count': -1, 'faulthandler': False, 'tracemalloc': 0, - 'perf_profiling': False, + 'perf_profiling': 0, 'import_time': False, 'code_debug_ranges': True, 'show_ref_count': False, @@ -652,7 +652,7 @@ class InitConfigTests(EmbeddingTestsMixin, unittest.TestCase): use_hash_seed=False, faulthandler=False, tracemalloc=False, - perf_profiling=False, + perf_profiling=0, pathconfig_warnings=False, ) if MS_WINDOWS: @@ -966,7 +966,7 @@ def test_init_from_config(self): 'use_hash_seed': True, 'hash_seed': 123, 'tracemalloc': 2, - 'perf_profiling': False, + 'perf_profiling': 0, 'import_time': True, 'code_debug_ranges': False, 'show_ref_count': True, @@ -1031,7 +1031,7 @@ def test_init_compat_env(self): 'use_hash_seed': True, 'hash_seed': 42, 'tracemalloc': 2, - 'perf_profiling': False, + 'perf_profiling': 0, 'import_time': True, 'code_debug_ranges': False, 'malloc_stats': True, @@ -1051,6 +1051,7 @@ def test_init_compat_env(self): 'module_search_paths': self.IGNORE_CONFIG, 'safe_path': True, 'int_max_str_digits': 4567, + 'perf_profiling': 1, } if Py_STATS: config['_pystats'] = 1 @@ -1066,7 +1067,7 @@ def test_init_python_env(self): 'use_hash_seed': True, 'hash_seed': 42, 'tracemalloc': 2, - 'perf_profiling': False, + 'perf_profiling': 0, 'import_time': True, 'code_debug_ranges': False, 'malloc_stats': True, @@ -1086,6 +1087,7 @@ def test_init_python_env(self): 'module_search_paths': self.IGNORE_CONFIG, 'safe_path': True, 'int_max_str_digits': 4567, + 'perf_profiling': 1, } if Py_STATS: config['_pystats'] = True @@ -1763,6 +1765,7 @@ def test_initconfig_api(self): 'xoptions': {'faulthandler': True}, 'hash_seed': 10, 'use_hash_seed': True, + 'perf_profiling': 2, } config_dev_mode(preconfig, config) self.check_all_configs("test_initconfig_api", config, preconfig, 
diff --git a/Lib/test/test_frame.py b/Lib/test/test_frame.py index ca88e657367d9a..32de8ed9a13f80 100644 --- a/Lib/test/test_frame.py +++ b/Lib/test/test_frame.py @@ -494,6 +494,27 @@ class ObjectSubclass: with self.assertRaises(TypeError): proxy[obj] = 0 + def test_constructor(self): + FrameLocalsProxy = type([sys._getframe().f_locals + for x in range(1)][0]) + self.assertEqual(FrameLocalsProxy.__name__, 'FrameLocalsProxy') + + def make_frame(): + x = 1 + y = 2 + return sys._getframe() + + proxy = FrameLocalsProxy(make_frame()) + self.assertEqual(proxy, {'x': 1, 'y': 2}) + + # constructor expects 1 frame argument + with self.assertRaises(TypeError): + FrameLocalsProxy() # no arguments + with self.assertRaises(TypeError): + FrameLocalsProxy(123) # wrong type + with self.assertRaises(TypeError): + FrameLocalsProxy(frame=sys._getframe()) # no keyword arguments + class FrameLocalsProxyMappingTests(mapping_tests.TestHashMappingProtocol): """Test that FrameLocalsProxy behaves like a Mapping (with exceptions)""" diff --git a/Lib/test/test_free_threading/test_list.py b/Lib/test/test_free_threading/test_list.py index c6b58fcd86f449..a705161369e8dd 100644 --- a/Lib/test/test_free_threading/test_list.py +++ b/Lib/test/test_free_threading/test_list.py @@ -3,10 +3,13 @@ from threading import Thread from unittest import TestCase -from test import support from test.support import threading_helper +NTHREAD = 10 +OBJECT_COUNT = 5_000 + + class C: def __init__(self, v): self.v = v @@ -14,11 +17,8 @@ def __init__(self, v): @threading_helper.requires_working_threading() class TestList(TestCase): - @support.requires_resource('cpu') def test_racing_iter_append(self): - l = [] - OBJECT_COUNT = 10000 def writer_func(): for i in range(OBJECT_COUNT): @@ -34,7 +34,7 @@ def reader_func(): writer = Thread(target=writer_func) readers = [] - for x in range(30): + for x in range(NTHREAD): reader = Thread(target=reader_func) readers.append(reader) reader.start() @@ -44,39 +44,32 @@ def reader_func(): for reader in readers: reader.join() - @support.requires_resource('cpu') def test_racing_iter_extend(self): - iters = [ - lambda x: [x], - ] - for iter_case in iters: - with self.subTest(iter=iter_case): - l = [] - OBJECT_COUNT = 10000 - - def writer_func(): - for i in range(OBJECT_COUNT): - l.extend(iter_case(C(i + OBJECT_COUNT))) - - def reader_func(): - while True: - count = len(l) - for i, x in enumerate(l): - self.assertEqual(x.v, i + OBJECT_COUNT) - if count == OBJECT_COUNT: - break - - writer = Thread(target=writer_func) - readers = [] - for x in range(30): - reader = Thread(target=reader_func) - readers.append(reader) - reader.start() - - writer.start() - writer.join() - for reader in readers: - reader.join() + l = [] + + def writer_func(): + for i in range(OBJECT_COUNT): + l.extend([C(i + OBJECT_COUNT)]) + + def reader_func(): + while True: + count = len(l) + for i, x in enumerate(l): + self.assertEqual(x.v, i + OBJECT_COUNT) + if count == OBJECT_COUNT: + break + + writer = Thread(target=writer_func) + readers = [] + for x in range(NTHREAD): + reader = Thread(target=reader_func) + readers.append(reader) + reader.start() + + writer.start() + writer.join() + for reader in readers: + reader.join() if __name__ == "__main__": diff --git a/Lib/test/test_free_threading/test_monitoring.py b/Lib/test/test_free_threading/test_monitoring.py index be582455d118ac..8fec01715531cb 100644 --- a/Lib/test/test_free_threading/test_monitoring.py +++ b/Lib/test/test_free_threading/test_monitoring.py @@ -7,7 +7,6 @@ import weakref from 
sys import monitoring -from test import support from test.support import threading_helper from threading import Thread, _PyRLock from unittest import TestCase @@ -15,7 +14,7 @@ class InstrumentationMultiThreadedMixin: thread_count = 10 - func_count = 200 + func_count = 50 fib = 12 def after_threads(self): @@ -37,14 +36,13 @@ def work(self, n, funcs): def start_work(self, n, funcs): # With the GIL builds we need to make sure that the hooks have # a chance to run as it's possible to run w/o releasing the GIL. - time.sleep(1) + time.sleep(0.1) self.work(n, funcs) def after_test(self): """Runs once after the test is done""" pass - @support.requires_resource('cpu') def test_instrumentation(self): # Setup a bunch of functions which will need instrumentation... funcs = [] @@ -220,29 +218,31 @@ def test_register_callback(self): for ref in self.refs: self.assertEqual(ref(), None) - @support.requires_resource('cpu') def test_set_local_trace_opcodes(self): def trace(frame, event, arg): frame.f_trace_opcodes = True return trace + loops = 1_000 + sys.settrace(trace) try: l = _PyRLock() def f(): - for i in range(3000): + for i in range(loops): with l: pass t = Thread(target=f) t.start() - for i in range(3000): + for i in range(loops): with l: pass t.join() finally: sys.settrace(None) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_free_threading/test_type.py b/Lib/test/test_free_threading/test_type.py index 977bfd2c7fd2f7..51463b6bb8c1b4 100644 --- a/Lib/test/test_free_threading/test_type.py +++ b/Lib/test/test_free_threading/test_type.py @@ -5,7 +5,6 @@ from threading import Thread from unittest import TestCase -from test import support from test.support import threading_helper @@ -97,8 +96,9 @@ def reader_func(): self.run_one(writer_func, reader_func) - @support.requires_resource('cpu') def test___class___modification(self): + loops = 200 + class Foo: pass @@ -108,7 +108,7 @@ class Bar: thing = Foo() def work(): foo = thing - for _ in range(5000): + for _ in range(loops): foo.__class__ = Bar type(foo) foo.__class__ = Foo diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py index 837f3795f0842d..bdaa9a7ec4f020 100644 --- a/Lib/test/test_functools.py +++ b/Lib/test/test_functools.py @@ -6,6 +6,7 @@ from itertools import permutations import pickle from random import choice +import re import sys from test import support import threading @@ -210,6 +211,51 @@ def foo(bar): p2.new_attr = 'spam' self.assertEqual(p2.new_attr, 'spam') + def test_placeholders_trailing_raise(self): + PH = self.module.Placeholder + for args in [(PH,), (0, PH), (0, PH, 1, PH, PH, PH)]: + with self.assertRaises(TypeError): + self.partial(capture, *args) + + def test_placeholders(self): + PH = self.module.Placeholder + # 1 Placeholder + args = (PH, 0) + p = self.partial(capture, *args) + actual_args, actual_kwds = p('x') + self.assertEqual(actual_args, ('x', 0)) + self.assertEqual(actual_kwds, {}) + # 2 Placeholders + args = (PH, 0, PH, 1) + p = self.partial(capture, *args) + with self.assertRaises(TypeError): + p('x') + actual_args, actual_kwds = p('x', 'y') + self.assertEqual(actual_args, ('x', 0, 'y', 1)) + self.assertEqual(actual_kwds, {}) + + def test_placeholders_optimization(self): + PH = self.module.Placeholder + p = self.partial(capture, PH, 0) + p2 = self.partial(p, PH, 1, 2, 3) + self.assertEqual(p2.args, (PH, 0, 1, 2, 3)) + p3 = self.partial(p2, -1, 4) + actual_args, actual_kwds = p3(5) + self.assertEqual(actual_args, (-1, 0, 1, 2, 3, 4, 5)) + self.assertEqual(actual_kwds, {}) + # 
inner partial has placeholders and outer partial has no args case + p = self.partial(capture, PH, 0) + p2 = self.partial(p) + self.assertEqual(p2.args, (PH, 0)) + self.assertEqual(p2(1), ((1, 0), {})) + + def test_construct_placeholder_singleton(self): + PH = self.module.Placeholder + tp = type(PH) + self.assertIs(tp(), PH) + self.assertRaises(TypeError, tp, 1, 2) + self.assertRaises(TypeError, tp, a=1, b=2) + def test_repr(self): args = (object(), object()) args_repr = ', '.join(repr(a) for a in args) @@ -311,6 +357,23 @@ def test_setstate(self): self.assertEqual(f(2), ((2,), {})) self.assertEqual(f(), ((), {})) + # Set State with placeholders + PH = self.module.Placeholder + f = self.partial(signature) + f.__setstate__((capture, (PH, 1), dict(a=10), dict(attr=[]))) + self.assertEqual(signature(f), (capture, (PH, 1), dict(a=10), dict(attr=[]))) + msg_regex = re.escape("missing positional arguments in 'partial' call; " + "expected at least 1, got 0") + with self.assertRaisesRegex(TypeError, f'^{msg_regex}$') as cm: + f() + self.assertEqual(f(2), ((2, 1), dict(a=10))) + + # Trailing Placeholder error + f = self.partial(signature) + msg_regex = re.escape("trailing Placeholders are not allowed") + with self.assertRaisesRegex(TypeError, f'^{msg_regex}$') as cm: + f.__setstate__((capture, (1, PH), dict(a=10), dict(attr=[]))) + def test_setstate_errors(self): f = self.partial(signature) self.assertRaises(TypeError, f.__setstate__, (capture, (), {})) @@ -456,6 +519,19 @@ def __str__(self): self.assertIn('astr', r) self.assertIn("['sth']", r) + def test_placeholders_refcount_smoke(self): + PH = self.module.Placeholder + # sum supports vector call + lst1, start = [], [] + sum_lists = self.partial(sum, PH, start) + for i in range(10): + sum_lists([lst1, lst1]) + # collections.ChainMap initializer does not support vectorcall + map1, map2 = {}, {} + partial_cm = self.partial(collections.ChainMap, PH, map1) + for i in range(10): + partial_cm(map2, map2) + class TestPartialPy(TestPartial, unittest.TestCase): module = py_functools @@ -480,6 +556,19 @@ class TestPartialCSubclass(TestPartialC): class TestPartialPySubclass(TestPartialPy): partial = PyPartialSubclass + def test_subclass_optimization(self): + # `partial` input to `partial` subclass + p = py_functools.partial(min, 2) + p2 = self.partial(p, 1) + self.assertIs(p2.func, min) + self.assertEqual(p2(0), 0) + # `partial` subclass input to `partial` subclass + p = self.partial(min, 2) + p2 = self.partial(p, 1) + self.assertIs(p2.func, min) + self.assertEqual(p2(0), 0) + + class TestPartialMethod(unittest.TestCase): class A(object): @@ -617,6 +706,20 @@ def f(a, b, /): p = functools.partial(f, 1) self.assertEqual(p(2), f(1, 2)) + def test_subclass_optimization(self): + class PartialMethodSubclass(functools.partialmethod): + pass + # `partialmethod` input to `partialmethod` subclass + p = functools.partialmethod(min, 2) + p2 = PartialMethodSubclass(p, 1) + self.assertIs(p2.func, min) + self.assertEqual(p2.__get__(0)(), 0) + # `partialmethod` subclass input to `partialmethod` subclass + p = PartialMethodSubclass(min, 2) + p2 = PartialMethodSubclass(p, 1) + self.assertIs(p2.func, min) + self.assertEqual(p2.__get__(0)(), 0) + class TestUpdateWrapper(unittest.TestCase): diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py index 906f9884d6792f..bb7df1f5cfa7f7 100644 --- a/Lib/test/test_gc.py +++ b/Lib/test/test_gc.py @@ -1048,6 +1048,24 @@ class Z: callback.assert_not_called() gc.enable() + @cpython_only + def test_get_referents_on_capsule(self): + # 
gh-124538: Calling gc.get_referents() on an untracked capsule must not crash. + import _datetime + import _socket + untracked_capsule = _datetime.datetime_CAPI + tracked_capsule = _socket.CAPI + + # For whoever sees this in the future: if this is failing + # after making datetime's capsule tracked, that's fine -- this isn't something + # users are relying on. Just find a different capsule that is untracked. + self.assertFalse(gc.is_tracked(untracked_capsule)) + self.assertTrue(gc.is_tracked(tracked_capsule)) + + self.assertEqual(len(gc.get_referents(untracked_capsule)), 0) + gc.get_referents(tracked_capsule) + + class IncrementalGCTests(unittest.TestCase): diff --git a/Lib/test/test_generated_cases.py b/Lib/test/test_generated_cases.py index 5d20e3c30bcf10..214e53dde64bbf 100644 --- a/Lib/test/test_generated_cases.py +++ b/Lib/test/test_generated_cases.py @@ -523,6 +523,36 @@ def test_pseudo_instruction_with_flags(self): """ self.run_cases_test(input, output) + def test_pseudo_instruction_as_sequence(self): + input = """ + pseudo(OP, (in -- out1, out2)) = [ + OP1, OP2 + ]; + + inst(OP1, (--)) { + } + + inst(OP2, (--)) { + } + """ + output = """ + TARGET(OP1) { + frame->instr_ptr = next_instr; + next_instr += 1; + INSTRUCTION_STATS(OP1); + DISPATCH(); + } + + TARGET(OP2) { + frame->instr_ptr = next_instr; + next_instr += 1; + INSTRUCTION_STATS(OP2); + DISPATCH(); + } + """ + self.run_cases_test(input, output) + + def test_array_input(self): input = """ inst(OP, (below, values[oparg*2], above --)) { diff --git a/Lib/test/test_inspect/inspect_fodder2.py b/Lib/test/test_inspect/inspect_fodder2.py index 43e9f852022934..43fda6622537fc 100644 --- a/Lib/test/test_inspect/inspect_fodder2.py +++ b/Lib/test/test_inspect/inspect_fodder2.py @@ -357,3 +357,15 @@ class td354(typing.TypedDict): # line 358 td359 = typing.TypedDict('td359', (('x', int), ('y', int))) + +import dataclasses + +# line 363 +@dataclasses.dataclass +class dc364: + x: int + y: int + +# line 369 +dc370 = dataclasses.make_dataclass('dc370', (('x', int), ('y', int))) +dc371 = dataclasses.make_dataclass('dc370', (('x', int), ('y', int)), module=__name__) diff --git a/Lib/test/test_inspect/test_inspect.py b/Lib/test/test_inspect/test_inspect.py index 81188ad4d1fbe1..d2dc9e147d29c2 100644 --- a/Lib/test/test_inspect/test_inspect.py +++ b/Lib/test/test_inspect/test_inspect.py @@ -835,6 +835,47 @@ class C: nonlocal __firstlineno__ self.assertRaises(OSError, inspect.getsource, C) +class TestGetsourceStdlib(unittest.TestCase): + # Test Python implementations of the stdlib modules + + def test_getsource_stdlib_collections_abc(self): + import collections.abc + lines, lineno = inspect.getsourcelines(collections.abc.Sequence) + self.assertEqual(lines[0], 'class Sequence(Reversible, Collection):\n') + src = inspect.getsource(collections.abc.Sequence) + self.assertEqual(src.splitlines(True), lines) + + def test_getsource_stdlib_tomllib(self): + import tomllib + self.assertRaises(OSError, inspect.getsource, tomllib.TOMLDecodeError) + self.assertRaises(OSError, inspect.getsourcelines, tomllib.TOMLDecodeError) + + def test_getsource_stdlib_abc(self): + # Pure Python implementation + abc = import_helper.import_fresh_module('abc', blocked=['_abc']) + with support.swap_item(sys.modules, 'abc', abc): + self.assertRaises(OSError, inspect.getsource, abc.ABCMeta) + self.assertRaises(OSError, inspect.getsourcelines, abc.ABCMeta) + # With C acceleration + import abc + try: + src = inspect.getsource(abc.ABCMeta) + lines, lineno = 
inspect.getsourcelines(abc.ABCMeta) + except OSError: + pass + else: + self.assertEqual(lines[0], ' class ABCMeta(type):\n') + self.assertEqual(src.splitlines(True), lines) + + def test_getsource_stdlib_decimal(self): + # Pure Python implementation + decimal = import_helper.import_fresh_module('decimal', blocked=['_decimal']) + with support.swap_item(sys.modules, 'decimal', decimal): + src = inspect.getsource(decimal.Decimal) + lines, lineno = inspect.getsourcelines(decimal.Decimal) + self.assertEqual(lines[0], 'class Decimal(object):\n') + self.assertEqual(src.splitlines(True), lines) + class TestGetsourceInteractive(unittest.TestCase): def test_getclasses_interactive(self): # bpo-44648: simulate a REPL session; @@ -947,6 +988,11 @@ def test_typeddict(self): self.assertSourceEqual(mod2.td354, 354, 356) self.assertRaises(OSError, inspect.getsource, mod2.td359) + def test_dataclass(self): + self.assertSourceEqual(mod2.dc364, 364, 367) + self.assertRaises(OSError, inspect.getsource, mod2.dc370) + self.assertRaises(OSError, inspect.getsource, mod2.dc371) + class TestBlockComments(GetSourceBase): fodderModule = mod @@ -1010,7 +1056,7 @@ def test_findsource_without_filename(self): self.assertRaises(IOError, inspect.findsource, co) self.assertRaises(IOError, inspect.getsource, co) - def test_findsource_with_out_of_bounds_lineno(self): + def test_findsource_on_func_with_out_of_bounds_lineno(self): mod_len = len(inspect.getsource(mod)) src = '\n' * 2* mod_len + "def f(): pass" co = compile(src, mod.__file__, "exec") @@ -1018,9 +1064,20 @@ def test_findsource_with_out_of_bounds_lineno(self): eval(co, g, l) func = l['f'] self.assertEqual(func.__code__.co_firstlineno, 1+2*mod_len) - with self.assertRaisesRegex(IOError, "lineno is out of bounds"): + with self.assertRaisesRegex(OSError, "lineno is out of bounds"): inspect.findsource(func) + def test_findsource_on_class_with_out_of_bounds_lineno(self): + mod_len = len(inspect.getsource(mod)) + src = '\n' * 2* mod_len + "class A: pass" + co = compile(src, mod.__file__, "exec") + g, l = {'__name__': mod.__name__}, {} + eval(co, g, l) + cls = l['A'] + self.assertEqual(cls.__firstlineno__, 1+2*mod_len) + with self.assertRaisesRegex(OSError, "lineno is out of bounds"): + inspect.findsource(cls) + def test_getsource_on_method(self): self.assertSourceEqual(mod2.ClassWithMethod.method, 118, 119) @@ -3341,7 +3398,7 @@ def foo(cls, *, arg): ...)) def test_signature_on_partial(self): - from functools import partial + from functools import partial, Placeholder def test(): pass @@ -3396,6 +3453,25 @@ def test(a, b, *, c, d): ('d', ..., ..., "keyword_only")), ...)) + # With Placeholder + self.assertEqual(self.signature(partial(test, Placeholder, 1)), + ((('a', ..., ..., "positional_only"), + ('c', ..., ..., "keyword_only"), + ('d', ..., ..., "keyword_only")), + ...)) + + self.assertEqual(self.signature(partial(test, Placeholder, 1, c=2)), + ((('a', ..., ..., "positional_only"), + ('c', 2, ..., "keyword_only"), + ('d', ..., ..., "keyword_only")), + ...)) + + # Ensure unittest.mock.ANY & similar do not get picked up as a Placeholder + self.assertEqual(self.signature(partial(test, unittest.mock.ANY, 1, c=2)), + ((('c', 2, ..., "keyword_only"), + ('d', ..., ..., "keyword_only")), + ...)) + def test(a, *args, b, **kwargs): pass @@ -3443,6 +3519,15 @@ def test(a, *args, b, **kwargs): ('kwargs', ..., ..., "var_keyword")), ...)) + # With Placeholder + p = partial(test, Placeholder, Placeholder, 1, b=0, test=1) + self.assertEqual(self.signature(p), + ((('a', ..., ..., 
"positional_only"), + ('args', ..., ..., "var_positional"), + ('b', 0, ..., "keyword_only"), + ('kwargs', ..., ..., "var_keyword")), + ...)) + def test(a, b, c:int) -> 42: pass @@ -3547,6 +3632,34 @@ def foo(a, b, /, c, d, **kwargs): ('kwargs', ..., ..., 'var_keyword')), ...)) + # Positional only With Placeholder + p = partial(foo, Placeholder, 1, c=0, d=1) + self.assertEqual(self.signature(p), + ((('a', ..., ..., "positional_only"), + ('c', 0, ..., "keyword_only"), + ('d', 1, ..., "keyword_only"), + ('kwargs', ..., ..., "var_keyword")), + ...)) + + # Optionals Positional With Placeholder + def foo(a=0, b=1, /, c=2, d=3): + pass + + # Positional + p = partial(foo, Placeholder, 1, c=0, d=1) + self.assertEqual(self.signature(p), + ((('a', ..., ..., "positional_only"), + ('c', 0, ..., "keyword_only"), + ('d', 1, ..., "keyword_only")), + ...)) + + # Positional or Keyword - transformed to positional + p = partial(foo, Placeholder, 1, Placeholder, 1) + self.assertEqual(self.signature(p), + ((('a', ..., ..., "positional_only"), + ('c', ..., ..., "positional_only")), + ...)) + def test_signature_on_partialmethod(self): from functools import partialmethod @@ -3559,18 +3672,32 @@ def test(): inspect.signature(Spam.ham) class Spam: - def test(it, a, *, c) -> 'spam': + def test(it, a, b, *, c) -> 'spam': pass ham = partialmethod(test, c=1) + bar = partialmethod(test, functools.Placeholder, 1, c=1) self.assertEqual(self.signature(Spam.ham, eval_str=False), ((('it', ..., ..., 'positional_or_keyword'), ('a', ..., ..., 'positional_or_keyword'), + ('b', ..., ..., 'positional_or_keyword'), ('c', 1, ..., 'keyword_only')), 'spam')) self.assertEqual(self.signature(Spam().ham, eval_str=False), ((('a', ..., ..., 'positional_or_keyword'), + ('b', ..., ..., 'positional_or_keyword'), + ('c', 1, ..., 'keyword_only')), + 'spam')) + + # With Placeholder + self.assertEqual(self.signature(Spam.bar, eval_str=False), + ((('it', ..., ..., 'positional_only'), + ('a', ..., ..., 'positional_only'), + ('c', 1, ..., 'keyword_only')), + 'spam')) + self.assertEqual(self.signature(Spam().bar, eval_str=False), + ((('a', ..., ..., 'positional_only'), ('c', 1, ..., 'keyword_only')), 'spam')) diff --git a/Lib/test/test_itertools.py b/Lib/test/test_itertools.py index 6820dce3f12620..8469de998ba014 100644 --- a/Lib/test/test_itertools.py +++ b/Lib/test/test_itertools.py @@ -1249,10 +1249,11 @@ def test_tee(self): self.assertEqual(len(result), n) self.assertEqual([list(x) for x in result], [list('abc')]*n) - # tee pass-through to copyable iterator + # tee objects are independent (see bug gh-123884) a, b = tee('abc') c, d = tee(a) - self.assertTrue(a is c) + e, f = tee(c) + self.assertTrue(len({a, b, c, d, e, f}) == 6) # test tee_new t1, t2 = tee('abc') @@ -1759,21 +1760,36 @@ def test_tee_recipe(self): def tee(iterable, n=2): if n < 0: - raise ValueError('n must be >= 0') - iterator = iter(iterable) - shared_link = [None, None] - return tuple(_tee(iterator, shared_link) for _ in range(n)) + raise ValueError + if n == 0: + return () + iterator = _tee(iterable) + result = [iterator] + for _ in range(n - 1): + result.append(_tee(iterator)) + return tuple(result) + + class _tee: + + def __init__(self, iterable): + it = iter(iterable) + if isinstance(it, _tee): + self.iterator = it.iterator + self.link = it.link + else: + self.iterator = it + self.link = [None, None] - def _tee(iterator, link): - try: - while True: - if link[1] is None: - link[0] = next(iterator) - link[1] = [None, None] - value, link = link - yield value - except 
StopIteration: - return + def __iter__(self): + return self + + def __next__(self): + link = self.link + if link[1] is None: + link[0] = next(self.iterator) + link[1] = [None, None] + value, self.link = link + return value # End tee() recipe ############################################# @@ -1819,12 +1835,10 @@ def _tee(iterator, link): self.assertRaises(TypeError, tee, [1,2], 'x') self.assertRaises(TypeError, tee, [1,2], 3, 'x') - # Tests not applicable to the tee() recipe - if False: - # tee object should be instantiable - a, b = tee('abc') - c = type(a)('def') - self.assertEqual(list(c), list('def')) + # tee object should be instantiable + a, b = tee('abc') + c = type(a)('def') + self.assertEqual(list(c), list('def')) # test long-lagged and multi-way split a, b, c = tee(range(2000), 3) @@ -1845,21 +1859,19 @@ def _tee(iterator, link): self.assertEqual(len(result), n) self.assertEqual([list(x) for x in result], [list('abc')]*n) + # tee objects are independent (see bug gh-123884) + a, b = tee('abc') + c, d = tee(a) + e, f = tee(c) + self.assertTrue(len({a, b, c, d, e, f}) == 6) - # Tests not applicable to the tee() recipe - if False: - # tee pass-through to copyable iterator - a, b = tee('abc') - c, d = tee(a) - self.assertTrue(a is c) - - # test tee_new - t1, t2 = tee('abc') - tnew = type(t1) - self.assertRaises(TypeError, tnew) - self.assertRaises(TypeError, tnew, 10) - t3 = tnew(t1) - self.assertTrue(list(t1) == list(t2) == list(t3) == list('abc')) + # test tee_new + t1, t2 = tee('abc') + tnew = type(t1) + self.assertRaises(TypeError, tnew) + self.assertRaises(TypeError, tnew, 10) + t3 = tnew(t1) + self.assertTrue(list(t1) == list(t2) == list(t3) == list('abc')) # test that tee objects are weak referencable a, b = tee(range(10)) diff --git a/Lib/test/test_pdb.py b/Lib/test/test_pdb.py index db7d1b1e9cd935..4c64a800cb32d2 100644 --- a/Lib/test/test_pdb.py +++ b/Lib/test/test_pdb.py @@ -363,6 +363,54 @@ def test_pdb_breakpoint_commands(): 4 """ +def test_pdb_commands(): + """Test the commands command of pdb. + + >>> def test_function(): + ... import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() + ... print(1) + ... print(2) + ... print(3) + + >>> reset_Breakpoint() + + >>> with PdbTestInput([ # doctest: +NORMALIZE_WHITESPACE + ... 'b 3', + ... 'commands', + ... 'silent', # suppress the frame status output + ... 'p "hello"', + ... 'end', + ... 'b 4', + ... 'commands', + ... 'until 5', # no output, should stop at line 5 + ... 'continue', # hit breakpoint at line 3 + ... '', # repeat continue, hit breakpoint at line 4 then `until` to line 5 + ... '', + ... ]): + ... test_function() + > (2)test_function() + -> import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() + (Pdb) b 3 + Breakpoint 1 at :3 + (Pdb) commands + (com) silent + (com) p "hello" + (com) end + (Pdb) b 4 + Breakpoint 2 at :4 + (Pdb) commands + (com) until 5 + (Pdb) continue + 'hello' + (Pdb) + 1 + 2 + > (5)test_function() + -> print(3) + (Pdb) + 3 + """ + def test_pdb_breakpoint_with_filename(): """Breakpoints with filename:lineno @@ -901,6 +949,38 @@ def test_pdb_where_command(): (Pdb) continue """ +def test_pdb_restart_command(): + """Test restart command + + >>> def test_function(): + ... import pdb; pdb.Pdb(nosigint=True, readrc=False, mode='inline').set_trace() + ... x = 1 + + >>> with PdbTestInput([ # doctest: +ELLIPSIS + ... 'restart', + ... 'continue', + ... ]): + ... 
test_function() + > (2)test_function() + -> import pdb; pdb.Pdb(nosigint=True, readrc=False, mode='inline').set_trace() + (Pdb) restart + *** run/restart command is disabled when pdb is running in inline mode. + Use the command line interface to enable restarting your program + e.g. "python -m pdb myscript.py" + (Pdb) continue + """ + +def test_pdb_commands_with_set_trace(): + """Test that commands can be passed to Pdb.set_trace() + + >>> def test_function(): + ... x = 1 + ... import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace(commands=['p x', 'c']) + + >>> test_function() + 1 + """ + # skip this test if sys.flags.no_site = True; # exit() isn't defined unless there's a site module. diff --git a/Lib/test/test_perf_profiler.py b/Lib/test/test_perf_profiler.py index b68a55259c62e1..672851425ffb53 100644 --- a/Lib/test/test_perf_profiler.py +++ b/Lib/test/test_perf_profiler.py @@ -23,6 +23,15 @@ raise unittest.SkipTest("test crash randomly on ASAN/MSAN/UBSAN build") +def is_jit_build(): + cflags = (sysconfig.get_config_var("PY_CORE_CFLAGS") or '') + return "_Py_JIT" in cflags + + +if is_jit_build(): + raise unittest.SkipTest("Perf support is not available in JIT builds") + + def supports_trampoline_profiling(): perf_trampoline = sysconfig.get_config_var("PY_HAVE_PERF_TRAMPOLINE") if not perf_trampoline: @@ -229,7 +238,7 @@ def is_unwinding_reliable_with_frame_pointers(): cflags = sysconfig.get_config_var("PY_CORE_CFLAGS") if not cflags: return False - return "no-omit-frame-pointer" in cflags and "_Py_JIT" not in cflags + return "no-omit-frame-pointer" in cflags def perf_command_works(): @@ -382,6 +391,7 @@ def baz(n): self.assertNotIn(f"py::bar:{script}", stdout) self.assertNotIn(f"py::baz:{script}", stdout) + @unittest.skipUnless(perf_command_works(), "perf command doesn't work") @unittest.skipUnless( is_unwinding_reliable_with_frame_pointers(), @@ -494,7 +504,9 @@ def _is_perf_version_at_least(major, minor): @unittest.skipUnless(perf_command_works(), "perf command doesn't work") -@unittest.skipUnless(_is_perf_version_at_least(6, 6), "perf command may not work due to a perf bug") +@unittest.skipUnless( + _is_perf_version_at_least(6, 6), "perf command may not work due to a perf bug" +) class TestPerfProfilerWithDwarf(unittest.TestCase, TestPerfProfilerMixin): def run_perf(self, script_dir, script, activate_trampoline=True): if activate_trampoline: diff --git a/Lib/test/test_pydoc/test_pydoc.py b/Lib/test/test_pydoc/test_pydoc.py index 2dba077cdea6a7..776e02f41a1cec 100644 --- a/Lib/test/test_pydoc/test_pydoc.py +++ b/Lib/test/test_pydoc/test_pydoc.py @@ -463,6 +463,14 @@ class BinaryInteger(enum.IntEnum): doc = pydoc.render_doc(BinaryInteger) self.assertIn('BinaryInteger.zero', doc) + def test_slotted_dataclass_with_field_docs(self): + import dataclasses + @dataclasses.dataclass(slots=True) + class My: + x: int = dataclasses.field(doc='Docstring for x') + doc = pydoc.render_doc(My) + self.assertIn('Docstring for x', doc) + def test_mixed_case_module_names_are_lower_cased(self): # issue16484 doc_link = get_pydoc_link(xml.etree.ElementTree) diff --git a/Lib/test/test_pyrepl/test_pyrepl.py b/Lib/test/test_pyrepl/test_pyrepl.py index e816de3720670f..36f940eaea4eac 100644 --- a/Lib/test/test_pyrepl/test_pyrepl.py +++ b/Lib/test/test_pyrepl/test_pyrepl.py @@ -8,7 +8,7 @@ import subprocess import sys import tempfile -from unittest import TestCase, skipUnless +from unittest import TestCase, skipUnless, skipIf from unittest.mock import patch from test.support import force_not_colorized from 
test.support import SHORT_TIMEOUT @@ -35,6 +35,94 @@ except ImportError: pty = None + +class ReplTestCase(TestCase): + def run_repl( + self, + repl_input: str | list[str], + env: dict | None = None, + *, + cmdline_args: list[str] | None = None, + cwd: str | None = None, + ) -> tuple[str, int]: + temp_dir = None + if cwd is None: + temp_dir = tempfile.TemporaryDirectory(ignore_cleanup_errors=True) + cwd = temp_dir.name + try: + return self._run_repl( + repl_input, env=env, cmdline_args=cmdline_args, cwd=cwd + ) + finally: + if temp_dir is not None: + temp_dir.cleanup() + + def _run_repl( + self, + repl_input: str | list[str], + *, + env: dict | None, + cmdline_args: list[str] | None, + cwd: str, + ) -> tuple[str, int]: + assert pty + master_fd, slave_fd = pty.openpty() + cmd = [sys.executable, "-i", "-u"] + if env is None: + cmd.append("-I") + elif "PYTHON_HISTORY" not in env: + env["PYTHON_HISTORY"] = os.path.join(cwd, ".regrtest_history") + if cmdline_args is not None: + cmd.extend(cmdline_args) + + try: + import termios + except ModuleNotFoundError: + pass + else: + term_attr = termios.tcgetattr(slave_fd) + term_attr[6][termios.VREPRINT] = 0 # pass through CTRL-R + term_attr[6][termios.VINTR] = 0 # pass through CTRL-C + termios.tcsetattr(slave_fd, termios.TCSANOW, term_attr) + + process = subprocess.Popen( + cmd, + stdin=slave_fd, + stdout=slave_fd, + stderr=slave_fd, + cwd=cwd, + text=True, + close_fds=True, + env=env if env else os.environ, + ) + os.close(slave_fd) + if isinstance(repl_input, list): + repl_input = "\n".join(repl_input) + "\n" + os.write(master_fd, repl_input.encode("utf-8")) + + output = [] + while select.select([master_fd], [], [], SHORT_TIMEOUT)[0]: + try: + data = os.read(master_fd, 1024).decode("utf-8") + if not data: + break + except OSError: + break + output.append(data) + else: + os.close(master_fd) + process.kill() + self.fail(f"Timeout while waiting for output, got: {''.join(output)}") + + os.close(master_fd) + try: + exit_code = process.wait(timeout=SHORT_TIMEOUT) + except subprocess.TimeoutExpired: + process.kill() + exit_code = process.wait() + return "".join(output), exit_code + + class TestCursorPosition(TestCase): def prepare_reader(self, events): console = FakeConsole(events) @@ -968,7 +1056,20 @@ def test_bracketed_paste_single_line(self): @skipUnless(pty, "requires pty") -class TestMain(TestCase): +class TestDumbTerminal(ReplTestCase): + def test_dumb_terminal_exits_cleanly(self): + env = os.environ.copy() + env.update({"TERM": "dumb"}) + output, exit_code = self.run_repl("exit()\n", env=env) + self.assertEqual(exit_code, 0) + self.assertIn("warning: can't use pyrepl", output) + self.assertNotIn("Exception", output) + self.assertNotIn("Traceback", output) + + +@skipUnless(pty, "requires pty") +@skipIf((os.environ.get("TERM") or "dumb") == "dumb", "can't use pyrepl in dumb terminal") +class TestMain(ReplTestCase): def setUp(self): # Cleanup from PYTHON* variables to isolate from local # user settings, see #121359. 
Such variables should be @@ -979,7 +1080,7 @@ def setUp(self): @force_not_colorized def test_exposed_globals_in_repl(self): - pre = "['__annotations__', '__builtins__'" + pre = "['__builtins__'" post = "'__loader__', '__name__', '__package__', '__spec__']" output, exit_code = self.run_repl(["sorted(dir())", "exit()"]) if "can't use pyrepl" in output: @@ -1078,15 +1179,6 @@ def test_inspect_keeps_globals_from_inspected_module(self): } self._run_repl_globals_test(expectations, as_module=True) - def test_dumb_terminal_exits_cleanly(self): - env = os.environ.copy() - env.update({"TERM": "dumb"}) - output, exit_code = self.run_repl("exit()\n", env=env) - self.assertEqual(exit_code, 0) - self.assertIn("warning: can't use pyrepl", output) - self.assertNotIn("Exception", output) - self.assertNotIn("Traceback", output) - @force_not_colorized def test_python_basic_repl(self): env = os.environ.copy() @@ -1209,80 +1301,6 @@ def test_proper_tracebacklimit(self): self.assertIn("in x3", output) self.assertIn("in ", output) - def run_repl( - self, - repl_input: str | list[str], - env: dict | None = None, - *, - cmdline_args: list[str] | None = None, - cwd: str | None = None, - ) -> tuple[str, int]: - temp_dir = None - if cwd is None: - temp_dir = tempfile.TemporaryDirectory(ignore_cleanup_errors=True) - cwd = temp_dir.name - try: - return self._run_repl( - repl_input, env=env, cmdline_args=cmdline_args, cwd=cwd - ) - finally: - if temp_dir is not None: - temp_dir.cleanup() - - def _run_repl( - self, - repl_input: str | list[str], - *, - env: dict | None, - cmdline_args: list[str] | None, - cwd: str, - ) -> tuple[str, int]: - assert pty - master_fd, slave_fd = pty.openpty() - cmd = [sys.executable, "-i", "-u"] - if env is None: - cmd.append("-I") - elif "PYTHON_HISTORY" not in env: - env["PYTHON_HISTORY"] = os.path.join(cwd, ".regrtest_history") - if cmdline_args is not None: - cmd.extend(cmdline_args) - process = subprocess.Popen( - cmd, - stdin=slave_fd, - stdout=slave_fd, - stderr=slave_fd, - cwd=cwd, - text=True, - close_fds=True, - env=env if env else os.environ, - ) - os.close(slave_fd) - if isinstance(repl_input, list): - repl_input = "\n".join(repl_input) + "\n" - os.write(master_fd, repl_input.encode("utf-8")) - - output = [] - while select.select([master_fd], [], [], SHORT_TIMEOUT)[0]: - try: - data = os.read(master_fd, 1024).decode("utf-8") - if not data: - break - except OSError: - break - output.append(data) - else: - os.close(master_fd) - process.kill() - self.fail(f"Timeout while waiting for output, got: {''.join(output)}") - - os.close(master_fd) - try: - exit_code = process.wait(timeout=SHORT_TIMEOUT) - except subprocess.TimeoutExpired: - process.kill() - exit_code = process.wait() - return "".join(output), exit_code - def test_readline_history_file(self): # skip, if readline module is not available readline = import_module('readline') @@ -1305,3 +1323,7 @@ def test_readline_history_file(self): output, exit_code = self.run_repl("exit\n", env=env) self.assertEqual(exit_code, 0) self.assertNotIn("\\040", pathlib.Path(hfile.name).read_text()) + + def test_keyboard_interrupt_after_isearch(self): + output, exit_code = self.run_repl(["\x12", "\x03", "exit"]) + self.assertEqual(exit_code, 0) diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py index 80e1d73b6b2aab..37e54d23b22516 100644 --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -1909,7 +1909,10 @@ def test_unzip_zipfile(self): subprocess.check_output(zip_cmd, stderr=subprocess.STDOUT) except 
subprocess.CalledProcessError as exc: details = exc.output.decode(errors="replace") - if 'unrecognized option: t' in details: + if any(message in details for message in [ + 'unrecognized option: t', # BusyBox + 'invalid option -- t', # Android + ]): self.skipTest("unzip doesn't support -t") msg = "{}\n\n**Unzip Output**\n{}" self.fail(msg.format(exc, details)) diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py index 9c415bd7d1c4e4..216aa84a8c147b 100644 --- a/Lib/test/test_ssl.py +++ b/Lib/test/test_ssl.py @@ -3,6 +3,7 @@ import sys import unittest import unittest.mock +from ast import literal_eval from test import support from test.support import import_helper from test.support import os_helper @@ -82,21 +83,8 @@ def data_file(*name): CAFILE_NEURONIO = data_file("capath", "4e1295a3.0") CAFILE_CACERT = data_file("capath", "5ed36f99.0") -CERTFILE_INFO = { - 'issuer': ((('countryName', 'XY'),), - (('localityName', 'Castle Anthrax'),), - (('organizationName', 'Python Software Foundation'),), - (('commonName', 'localhost'),)), - 'notAfter': 'Jan 24 04:21:36 2043 GMT', - 'notBefore': 'Nov 25 04:21:36 2023 GMT', - 'serialNumber': '53E14833F7546C29256DD0F034F776C5E983004C', - 'subject': ((('countryName', 'XY'),), - (('localityName', 'Castle Anthrax'),), - (('organizationName', 'Python Software Foundation'),), - (('commonName', 'localhost'),)), - 'subjectAltName': (('DNS', 'localhost'),), - 'version': 3 -} +with open(data_file('keycert.pem.reference')) as file: + CERTFILE_INFO = literal_eval(file.read()) # empty CRL CRLFILE = data_file("revocation.crl") @@ -106,23 +94,8 @@ def data_file(*name): SINGED_CERTFILE_ONLY = data_file("cert3.pem") SIGNED_CERTFILE_HOSTNAME = 'localhost' -SIGNED_CERTFILE_INFO = { - 'OCSP': ('http://testca.pythontest.net/testca/ocsp/',), - 'caIssuers': ('http://testca.pythontest.net/testca/pycacert.cer',), - 'crlDistributionPoints': ('http://testca.pythontest.net/testca/revocation.crl',), - 'issuer': ((('countryName', 'XY'),), - (('organizationName', 'Python Software Foundation CA'),), - (('commonName', 'our-ca-server'),)), - 'notAfter': 'Oct 28 14:23:16 2037 GMT', - 'notBefore': 'Aug 29 14:23:16 2018 GMT', - 'serialNumber': 'CB2D80995A69525C', - 'subject': ((('countryName', 'XY'),), - (('localityName', 'Castle Anthrax'),), - (('organizationName', 'Python Software Foundation'),), - (('commonName', 'localhost'),)), - 'subjectAltName': (('DNS', 'localhost'),), - 'version': 3 -} +with open(data_file('keycert3.pem.reference')) as file: + SIGNED_CERTFILE_INFO = literal_eval(file.read()) SIGNED_CERTFILE2 = data_file("keycert4.pem") SIGNED_CERTFILE2_HOSTNAME = 'fakehostname' diff --git a/Lib/test/test_super.py b/Lib/test/test_super.py index 3ffbe03f0c2f11..1222ec6a3c4109 100644 --- a/Lib/test/test_super.py +++ b/Lib/test/test_super.py @@ -513,6 +513,7 @@ def test___class___modification_multithreaded(self): This should be the case anyways as our test suite sets an audit hook. 
""" + class Foo: pass @@ -522,7 +523,7 @@ class Bar: thing = Foo() def work(): foo = thing - for _ in range(5000): + for _ in range(200): foo.__class__ = Bar type(foo) foo.__class__ = Foo diff --git a/Lib/test/test_support.py b/Lib/test/test_support.py index e60e5477d32e1f..9a3cf140d81241 100644 --- a/Lib/test/test_support.py +++ b/Lib/test/test_support.py @@ -548,13 +548,14 @@ def test_optim_args_from_interpreter_flags(self): with self.subTest(opts=opts): self.check_options(opts, 'optim_args_from_interpreter_flags') + @unittest.skipIf(support.is_apple_mobile, "Unstable on Apple Mobile") @unittest.skipIf(support.is_emscripten, "Unstable in Emscripten") @unittest.skipIf(support.is_wasi, "Unavailable on WASI") def test_fd_count(self): - # We cannot test the absolute value of fd_count(): on old Linux - # kernel or glibc versions, os.urandom() keeps a FD open on - # /dev/urandom device and Python has 4 FD opens instead of 3. - # Test is unstable on Emscripten. The platform starts and stops + # We cannot test the absolute value of fd_count(): on old Linux kernel + # or glibc versions, os.urandom() keeps a FD open on /dev/urandom + # device and Python has 4 FD opens instead of 3. Test is unstable on + # Emscripten and Apple Mobile platforms; these platforms start and stop # background threads that use pipes and epoll fds. start = os_helper.fd_count() fd = os.open(__file__, os.O_RDONLY) diff --git a/Lib/test/test_time.py b/Lib/test/test_time.py index 293799ff68ea05..530c317a852e77 100644 --- a/Lib/test/test_time.py +++ b/Lib/test/test_time.py @@ -654,8 +654,7 @@ def year4d(y): self.test_year('%04d', func=year4d) def skip_if_not_supported(y): - msg = "strftime() is limited to [1; 9999] with Visual Studio" - # Check that it doesn't crash for year > 9999 + msg = f"strftime() does not support year {y} on this platform" try: time.strftime('%Y', (y,) + (0,) * 8) except ValueError: diff --git a/Lib/test/test_ttk/test_style.py b/Lib/test/test_ttk/test_style.py index 9a04a95dc40d65..eeaf5de2e303f6 100644 --- a/Lib/test/test_ttk/test_style.py +++ b/Lib/test/test_ttk/test_style.py @@ -227,13 +227,13 @@ def test_element_create_image(self): foreground='blue', background='yellow') img3 = tkinter.BitmapImage(master=self.root, file=imgfile, foreground='white', background='black') - style.element_create('Button.button', 'image', + style.element_create('TestButton.button', 'image', img1, ('pressed', img2), ('active', img3), border=(2, 4), sticky='we') - self.assertIn('Button.button', style.element_names()) + self.assertIn('TestButton.button', style.element_names()) - style.layout('Button', [('Button.button', {'sticky': 'news'})]) - b = ttk.Button(self.root, style='Button') + style.layout('TestButton', [('TestButton.button', {'sticky': 'news'})]) + b = ttk.Button(self.root, style='TestButton') b.pack(expand=True, fill='both') self.assertEqual(b.winfo_reqwidth(), 16) self.assertEqual(b.winfo_reqheight(), 16) diff --git a/Lib/test/test_ttk/test_widgets.py b/Lib/test/test_ttk/test_widgets.py index cb210b7d2fc960..88740b18864006 100644 --- a/Lib/test/test_ttk/test_widgets.py +++ b/Lib/test/test_ttk/test_widgets.py @@ -963,8 +963,7 @@ def create(self, **kwargs): return ttk.Scrollbar(self.root, **kwargs) -@add_standard_options(PixelSizeTests if tk_version >= (8, 7) else IntegerSizeTests, - StandardTtkOptionsTests) +@add_standard_options(StandardTtkOptionsTests) class NotebookTest(AbstractWidgetTest, unittest.TestCase): OPTIONS = ( 'class', 'cursor', 'height', 'padding', 'style', 'takefocus', 'width', @@ -983,6 +982,20 @@ 
def setUp(self): def create(self, **kwargs): return ttk.Notebook(self.root, **kwargs) + def test_configure_height(self): + widget = self.create() + if get_tk_patchlevel(self.root) < (8, 6, 15): + self.checkIntegerParam(widget, 'height', 402, -402, 0) + else: + self.checkPixelsParam(widget, 'height', '10c', 402, -402, 0, conv=False) + + def test_configure_width(self): + widget = self.create() + if get_tk_patchlevel(self.root) < (8, 6, 15): + self.checkIntegerParam(widget, 'width', 402, -402, 0) + else: + self.checkPixelsParam(widget, 'width', '10c', 402, -402, 0, conv=False) + def test_tab_identifiers(self): self.nb.forget(0) self.nb.hide(self.child2) diff --git a/Lib/test/test_type_aliases.py b/Lib/test/test_type_aliases.py index 49d6aa810304fb..ebb65d8c6cf81b 100644 --- a/Lib/test/test_type_aliases.py +++ b/Lib/test/test_type_aliases.py @@ -211,6 +211,19 @@ def test_generic(self): self.assertEqual(TA.__value__, list[T]) self.assertEqual(TA.__type_params__, (T,)) self.assertEqual(TA.__module__, __name__) + self.assertIs(type(TA[int]), types.GenericAlias) + + def test_not_generic(self): + TA = TypeAliasType("TA", list[int], type_params=()) + self.assertEqual(TA.__name__, "TA") + self.assertEqual(TA.__value__, list[int]) + self.assertEqual(TA.__type_params__, ()) + self.assertEqual(TA.__module__, __name__) + with self.assertRaisesRegex( + TypeError, + "Only generic type aliases are subscriptable", + ): + TA[int] def test_keywords(self): TA = TypeAliasType(name="TA", value=int) diff --git a/Lib/test/test_type_annotations.py b/Lib/test/test_type_annotations.py index 91082e6b23c04b..257b7fa95dcb76 100644 --- a/Lib/test/test_type_annotations.py +++ b/Lib/test/test_type_annotations.py @@ -375,7 +375,7 @@ class X: with self.assertRaises(NotImplementedError): annotate(annotationlib.Format.FORWARDREF) with self.assertRaises(NotImplementedError): - annotate(annotationlib.Format.SOURCE) + annotate(annotationlib.Format.STRING) with self.assertRaises(NotImplementedError): annotate(None) self.assertEqual(annotate(annotationlib.Format.VALUE), {"x": int}) diff --git a/Lib/test/test_type_params.py b/Lib/test/test_type_params.py index dc0c0d0829f8d3..433b19593bdd04 100644 --- a/Lib/test/test_type_params.py +++ b/Lib/test/test_type_params.py @@ -1440,7 +1440,7 @@ def f[T: int = int, **P = int, *Ts = int](): pass self.assertIs(case(1), int) self.assertIs(annotationlib.call_evaluate_function(case, annotationlib.Format.VALUE), int) self.assertIs(annotationlib.call_evaluate_function(case, annotationlib.Format.FORWARDREF), int) - self.assertEqual(annotationlib.call_evaluate_function(case, annotationlib.Format.SOURCE), 'int') + self.assertEqual(annotationlib.call_evaluate_function(case, annotationlib.Format.STRING), 'int') def test_constraints(self): def f[T: (int, str)](): pass @@ -1451,4 +1451,15 @@ def f[T: (int, str)](): pass self.assertEqual(case.evaluate_constraints(1), (int, str)) self.assertEqual(annotationlib.call_evaluate_function(case.evaluate_constraints, annotationlib.Format.VALUE), (int, str)) self.assertEqual(annotationlib.call_evaluate_function(case.evaluate_constraints, annotationlib.Format.FORWARDREF), (int, str)) - self.assertEqual(annotationlib.call_evaluate_function(case.evaluate_constraints, annotationlib.Format.SOURCE), '(int, str)') + self.assertEqual(annotationlib.call_evaluate_function(case.evaluate_constraints, annotationlib.Format.STRING), '(int, str)') + + def test_const_evaluator(self): + T = TypeVar("T", bound=int) + self.assertEqual(repr(T.evaluate_bound), ">") + + ConstEvaluator = 
type(T.evaluate_bound) + + with self.assertRaisesRegex(TypeError, r"cannot create '_typing\._ConstEvaluator' instances"): + ConstEvaluator() # This used to segfault. + with self.assertRaisesRegex(TypeError, r"cannot set 'attribute' attribute of immutable type '_typing\._ConstEvaluator'"): + ConstEvaluator.attribute = 1 diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py index 3ac6b97383fcef..2f1f9e86a0bce4 100644 --- a/Lib/test/test_typing.py +++ b/Lib/test/test_typing.py @@ -7059,7 +7059,7 @@ class C: self.assertIsInstance(annos['x'], annotationlib.ForwardRef) self.assertEqual(annos['x'].__arg__, 'undefined') - self.assertEqual(get_type_hints(C, format=annotationlib.Format.SOURCE), + self.assertEqual(get_type_hints(C, format=annotationlib.Format.STRING), {'x': 'undefined'}) @@ -7898,7 +7898,7 @@ class Z(NamedTuple): self.assertEqual(Z.__annotations__, annos) self.assertEqual(Z.__annotate__(annotationlib.Format.VALUE), annos) self.assertEqual(Z.__annotate__(annotationlib.Format.FORWARDREF), annos) - self.assertEqual(Z.__annotate__(annotationlib.Format.SOURCE), {"a": "None", "b": "str"}) + self.assertEqual(Z.__annotate__(annotationlib.Format.STRING), {"a": "None", "b": "str"}) def test_future_annotations(self): code = """ @@ -8241,7 +8241,7 @@ def test_basics_functional_syntax(self): self.assertEqual(Emp.__annotations__, annos) self.assertEqual(Emp.__annotate__(annotationlib.Format.VALUE), annos) self.assertEqual(Emp.__annotate__(annotationlib.Format.FORWARDREF), annos) - self.assertEqual(Emp.__annotate__(annotationlib.Format.SOURCE), {'name': 'str', 'id': 'int'}) + self.assertEqual(Emp.__annotate__(annotationlib.Format.STRING), {'name': 'str', 'id': 'int'}) self.assertEqual(Emp.__total__, True) self.assertEqual(Emp.__required_keys__, {'name', 'id'}) self.assertIsInstance(Emp.__required_keys__, frozenset) @@ -8603,7 +8603,7 @@ class A[T](TypedDict): self.assertEqual(A.__orig_bases__, (TypedDict, Generic[T])) self.assertEqual(A.__mro__, (A, Generic, dict, object)) self.assertEqual(A.__annotations__, {'a': T}) - self.assertEqual(A.__annotate__(annotationlib.Format.SOURCE), {'a': 'T'}) + self.assertEqual(A.__annotate__(annotationlib.Format.STRING), {'a': 'T'}) self.assertEqual(A.__parameters__, (T,)) self.assertEqual(A[str].__parameters__, ()) self.assertEqual(A[str].__args__, (str,)) @@ -8616,7 +8616,7 @@ class A(TypedDict, Generic[T]): self.assertEqual(A.__orig_bases__, (TypedDict, Generic[T])) self.assertEqual(A.__mro__, (A, Generic, dict, object)) self.assertEqual(A.__annotations__, {'a': T}) - self.assertEqual(A.__annotate__(annotationlib.Format.SOURCE), {'a': 'T'}) + self.assertEqual(A.__annotate__(annotationlib.Format.STRING), {'a': 'T'}) self.assertEqual(A.__parameters__, (T,)) self.assertEqual(A[str].__parameters__, ()) self.assertEqual(A[str].__args__, (str,)) @@ -8628,7 +8628,7 @@ class A2(Generic[T], TypedDict): self.assertEqual(A2.__orig_bases__, (Generic[T], TypedDict)) self.assertEqual(A2.__mro__, (A2, Generic, dict, object)) self.assertEqual(A2.__annotations__, {'a': T}) - self.assertEqual(A2.__annotate__(annotationlib.Format.SOURCE), {'a': 'T'}) + self.assertEqual(A2.__annotate__(annotationlib.Format.STRING), {'a': 'T'}) self.assertEqual(A2.__parameters__, (T,)) self.assertEqual(A2[str].__parameters__, ()) self.assertEqual(A2[str].__args__, (str,)) @@ -8640,7 +8640,7 @@ class B(A[KT], total=False): self.assertEqual(B.__orig_bases__, (A[KT],)) self.assertEqual(B.__mro__, (B, Generic, dict, object)) self.assertEqual(B.__annotations__, {'a': T, 'b': KT}) - 
self.assertEqual(B.__annotate__(annotationlib.Format.SOURCE), {'a': 'T', 'b': 'KT'}) + self.assertEqual(B.__annotate__(annotationlib.Format.STRING), {'a': 'T', 'b': 'KT'}) self.assertEqual(B.__parameters__, (KT,)) self.assertEqual(B.__total__, False) self.assertEqual(B.__optional_keys__, frozenset(['b'])) @@ -8665,7 +8665,7 @@ class C(B[int]): 'b': KT, 'c': int, }) - self.assertEqual(C.__annotate__(annotationlib.Format.SOURCE), { + self.assertEqual(C.__annotate__(annotationlib.Format.STRING), { 'a': 'T', 'b': 'KT', 'c': 'int', @@ -8689,7 +8689,7 @@ class Point3D(Point2DGeneric[T], Generic[T, KT]): 'b': T, 'c': KT, }) - self.assertEqual(Point3D.__annotate__(annotationlib.Format.SOURCE), { + self.assertEqual(Point3D.__annotate__(annotationlib.Format.STRING), { 'a': 'T', 'b': 'T', 'c': 'KT', @@ -8725,7 +8725,7 @@ class WithImplicitAny(B): 'b': KT, 'c': int, }) - self.assertEqual(WithImplicitAny.__annotate__(annotationlib.Format.SOURCE), { + self.assertEqual(WithImplicitAny.__annotate__(annotationlib.Format.STRING), { 'a': 'T', 'b': 'KT', 'c': 'int', @@ -8929,7 +8929,7 @@ class A(TypedDict): A.__annotations__ self.assertEqual( - A.__annotate__(annotationlib.Format.SOURCE), + A.__annotate__(annotationlib.Format.STRING), {'x': 'NotRequired[undefined]', 'y': 'ReadOnly[undefined]', 'z': 'Required[undefined]'}, ) diff --git a/Lib/test/test_unittest/testmock/testhelpers.py b/Lib/test/test_unittest/testmock/testhelpers.py index c9c20f008ca5a2..f260769eb8c35e 100644 --- a/Lib/test/test_unittest/testmock/testhelpers.py +++ b/Lib/test/test_unittest/testmock/testhelpers.py @@ -8,8 +8,10 @@ Mock, ANY, _CallList, patch, PropertyMock, _callable ) +from dataclasses import dataclass, field, InitVar from datetime import datetime from functools import partial +from typing import ClassVar class SomeClass(object): def one(self, a, b): pass @@ -1034,6 +1036,76 @@ def f(a): pass self.assertEqual(mock.mock_calls, []) self.assertEqual(rv.mock_calls, []) + def test_dataclass_post_init(self): + @dataclass + class WithPostInit: + a: int = field(init=False) + b: int = field(init=False) + def __post_init__(self): + self.a = 1 + self.b = 2 + + for mock in [ + create_autospec(WithPostInit, instance=True), + create_autospec(WithPostInit()), + ]: + with self.subTest(mock=mock): + self.assertIsInstance(mock.a, int) + self.assertIsInstance(mock.b, int) + + # Classes do not have these fields: + mock = create_autospec(WithPostInit) + msg = "Mock object has no attribute" + with self.assertRaisesRegex(AttributeError, msg): + mock.a + with self.assertRaisesRegex(AttributeError, msg): + mock.b + + def test_dataclass_default(self): + @dataclass + class WithDefault: + a: int + b: int = 0 + + for mock in [ + create_autospec(WithDefault, instance=True), + create_autospec(WithDefault(1)), + ]: + with self.subTest(mock=mock): + self.assertIsInstance(mock.a, int) + self.assertIsInstance(mock.b, int) + + def test_dataclass_with_method(self): + @dataclass + class WithMethod: + a: int + def b(self) -> int: + return 1 + + for mock in [ + create_autospec(WithMethod, instance=True), + create_autospec(WithMethod(1)), + ]: + with self.subTest(mock=mock): + self.assertIsInstance(mock.a, int) + mock.b.assert_not_called() + + def test_dataclass_with_non_fields(self): + @dataclass + class WithNonFields: + a: ClassVar[int] + b: InitVar[int] + + msg = "Mock object has no attribute" + for mock in [ + create_autospec(WithNonFields, instance=True), + create_autospec(WithNonFields(1)), + ]: + with self.subTest(mock=mock): + with 
self.assertRaisesRegex(AttributeError, msg): + mock.a + with self.assertRaisesRegex(AttributeError, msg): + mock.b class TestCallList(unittest.TestCase): diff --git a/Lib/typing.py b/Lib/typing.py index 9377e771d60f4b..c924c767042552 100644 --- a/Lib/typing.py +++ b/Lib/typing.py @@ -242,21 +242,10 @@ def _type_repr(obj): typically enough to uniquely identify a type. For everything else, we fall back on repr(obj). """ - # When changing this function, don't forget about - # `_collections_abc._type_repr`, which does the same thing - # and must be consistent with this one. - if isinstance(obj, type): - if obj.__module__ == 'builtins': - return obj.__qualname__ - return f'{obj.__module__}.{obj.__qualname__}' - if obj is ...: - return '...' - if isinstance(obj, types.FunctionType): - return obj.__name__ if isinstance(obj, tuple): # Special case for `repr` of types with `ParamSpec`: return '[' + ', '.join(_type_repr(t) for t in obj) + ']' - return repr(obj) + return annotationlib.value_to_string(obj) def _collect_type_parameters(args, *, enforce_default_ordering: bool = True): @@ -1047,7 +1036,7 @@ def evaluate_forward_ref( * Recursively evaluates forward references nested within the type hint. * Rejects certain objects that are not valid type hints. * Replaces type hints that evaluate to None with types.NoneType. - * Supports the *FORWARDREF* and *SOURCE* formats. + * Supports the *FORWARDREF* and *STRING* formats. *forward_ref* must be an instance of ForwardRef. *owner*, if given, should be the object that holds the annotations that the forward reference @@ -1064,7 +1053,7 @@ def evaluate_forward_ref( if type_params is _sentinel: _deprecation_warning_for_no_type_params_passed("typing.evaluate_forward_ref") type_params = () - if format == annotationlib.Format.SOURCE: + if format == annotationlib.Format.STRING: return forward_ref.__forward_arg__ if forward_ref.__forward_arg__ in _recursive_guard: return forward_ref @@ -2391,7 +2380,7 @@ def get_type_hints(obj, globalns=None, localns=None, include_extras=False, hints = {} for base in reversed(obj.__mro__): ann = annotationlib.get_annotations(base, format=format) - if format is annotationlib.Format.SOURCE: + if format is annotationlib.Format.STRING: hints.update(ann) continue if globalns is None: @@ -2415,7 +2404,7 @@ def get_type_hints(obj, globalns=None, localns=None, include_extras=False, value = _eval_type(value, base_globals, base_locals, base.__type_params__, format=format, owner=obj) hints[name] = value - if include_extras or format is annotationlib.Format.SOURCE: + if include_extras or format is annotationlib.Format.STRING: return hints else: return {k: _strip_annotations(t) for k, t in hints.items()} @@ -2429,7 +2418,7 @@ def get_type_hints(obj, globalns=None, localns=None, include_extras=False, and not hasattr(obj, '__annotate__') ): raise TypeError(f"{obj!r} is not a module, class, or callable.") - if format is annotationlib.Format.SOURCE: + if format is annotationlib.Format.STRING: return hints if globalns is None: @@ -2948,14 +2937,10 @@ def annotate(format): if format in (annotationlib.Format.VALUE, annotationlib.Format.FORWARDREF): return checked_types else: - return _convert_to_source(types) + return annotationlib.annotations_to_string(types) return annotate -def _convert_to_source(types): - return {n: t if isinstance(t, str) else _type_repr(t) for n, t in types.items()} - - # attributes prohibited to set in NamedTuple class syntax _prohibited = frozenset({'__new__', '__init__', '__slots__', '__getnewargs__', '_fields', 
'_field_defaults', @@ -2987,7 +2972,7 @@ def __new__(cls, typename, bases, ns): def annotate(format): annos = annotationlib.call_annotate_function(original_annotate, format) - if format != annotationlib.Format.SOURCE: + if format != annotationlib.Format.STRING: return {key: _type_check(val, f"field {key} annotation must be a type") for key, val in annos.items()} return annos @@ -3235,13 +3220,13 @@ def __annotate__(format): annos.update(base_annos) if own_annotate is not None: own = annotationlib.call_annotate_function(own_annotate, format, owner=tp_dict) - if format != annotationlib.Format.SOURCE: + if format != annotationlib.Format.STRING: own = { n: _type_check(tp, msg, module=tp_dict.__module__) for n, tp in own.items() } - elif format == annotationlib.Format.SOURCE: - own = _convert_to_source(own_annotations) + elif format == annotationlib.Format.STRING: + own = annotationlib.annotations_to_string(own_annotations) else: own = own_checked_annotations annos.update(own) diff --git a/Lib/unittest/mock.py b/Lib/unittest/mock.py index bb34c7436047ad..21ca061a77c26f 100644 --- a/Lib/unittest/mock.py +++ b/Lib/unittest/mock.py @@ -34,6 +34,7 @@ import pkgutil from inspect import iscoroutinefunction import threading +from dataclasses import fields, is_dataclass from types import CodeType, ModuleType, MethodType from unittest.util import safe_repr from functools import wraps, partial @@ -628,7 +629,9 @@ def __set_side_effect(self, value): side_effect = property(__get_side_effect, __set_side_effect) - def reset_mock(self, visited=None, *, return_value=False, side_effect=False): + def reset_mock(self, visited=None, *, + return_value: bool = False, + side_effect: bool = False): "Restore the mock object to its initial state." if visited is None: visited = [] @@ -2218,7 +2221,7 @@ def mock_add_spec(self, spec, spec_set=False): self._mock_add_spec(spec, spec_set) self._mock_set_magics() - def reset_mock(self, /, *args, return_value=False, **kwargs): + def reset_mock(self, /, *args, return_value: bool = False, **kwargs): if ( return_value and self._mock_name @@ -2754,7 +2757,15 @@ def create_autospec(spec, spec_set=False, instance=False, _parent=None, raise InvalidSpecError(f'Cannot autospec a Mock object. ' f'[object={spec!r}]') is_async_func = _is_async_func(spec) - _kwargs = {'spec': spec} + + entries = [(entry, _missing) for entry in dir(spec)] + if is_type and instance and is_dataclass(spec): + dataclass_fields = fields(spec) + entries.extend((f.name, f.type) for f in dataclass_fields) + _kwargs = {'spec': [f.name for f in dataclass_fields]} + else: + _kwargs = {'spec': spec} + if spec_set: _kwargs = {'spec_set': spec} elif spec is None: @@ -2811,7 +2822,7 @@ def create_autospec(spec, spec_set=False, instance=False, _parent=None, _name='()', _parent=mock, wraps=wrapped) - for entry in dir(spec): + for entry, original in entries: if _is_magic(entry): # MagicMock already does the useful magic methods for us continue @@ -2825,10 +2836,11 @@ def create_autospec(spec, spec_set=False, instance=False, _parent=None, # AttributeError on being fetched? # we could be resilient against it, or catch and propagate the # exception when the attribute is fetched from the mock - try: - original = getattr(spec, entry) - except AttributeError: - continue + if original is _missing: + try: + original = getattr(spec, entry) + except AttributeError: + continue child_kwargs = {'spec': original} # Wrap child attributes also. 
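The ``create_autospec`` change above makes dataclass fields part of the generated spec. A minimal sketch of the resulting behaviour (the ``Order`` dataclass is hypothetical and used only for illustration)::

    from dataclasses import dataclass
    from unittest.mock import create_autospec

    @dataclass
    class Order:
        item: str
        quantity: int = 1

    mock_order = create_autospec(Order, instance=True)
    # Dataclass fields are included in the spec, so they exist on the mock.
    print(mock_order.item, mock_order.quantity)
    # Attributes outside the spec are still rejected.
    try:
        mock_order.missing
    except AttributeError as exc:
        print(exc)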
diff --git a/Mac/BuildScript/build-installer.py b/Mac/BuildScript/build-installer.py index b97738836d92cc..f5f0ed44884142 100755 --- a/Mac/BuildScript/build-installer.py +++ b/Mac/BuildScript/build-installer.py @@ -264,10 +264,10 @@ def library_recipes(): tk_patches = ['backport_gh71383_fix.patch', 'tk868_on_10_8_10_9.patch', 'backport_gh110950_fix.patch'] else: - tcl_tk_ver='8.6.14' - tcl_checksum='5880225babf7954c58d4fb0f5cf6279104ce1cd6aa9b71e9a6322540e1c4de66' + tcl_tk_ver='8.6.15' + tcl_checksum='861e159753f2e2fbd6ec1484103715b0be56be3357522b858d3cbb5f893ffef1' - tk_checksum='8ffdb720f47a6ca6107eac2dd877e30b0ef7fac14f3a84ebbd0b3612cee41a94' + tk_checksum='550969f35379f952b3020f3ab7b9dd5bfd11c1ef7c9b7c6a75f5c49aca793fec' tk_patches = [] diff --git a/Misc/ACKS b/Misc/ACKS index ef0f403950255b..d94cbacf888468 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -1552,7 +1552,9 @@ Lisa Roach Carl Robben Ben Roberts Mark Roberts +Tony Roberts Andy Robinson +Izan "TizzySaurus" Robinson Jim Robinson Yolanda Robla Daniel Rocco diff --git a/Misc/NEWS.d/next/Build/2024-05-22-13-18-02.gh-issue-119400.WEt83v.rst b/Misc/NEWS.d/next/Build/2024-05-22-13-18-02.gh-issue-119400.WEt83v.rst new file mode 100644 index 00000000000000..b4029f205797e4 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2024-05-22-13-18-02.gh-issue-119400.WEt83v.rst @@ -0,0 +1,2 @@ +``make_ssl_certs``, the script that prepares certificate data for the +test suite, now allows specifying expiration dates. diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-07-30-11-41-35.gh-issue-122445.Rq0bjS.rst b/Misc/NEWS.d/next/Core and Builtins/2024-07-30-11-41-35.gh-issue-122445.Rq0bjS.rst index f5aa07c6513ea9..cb9dabbc71706f 100644 --- a/Misc/NEWS.d/next/Core and Builtins/2024-07-30-11-41-35.gh-issue-122445.Rq0bjS.rst +++ b/Misc/NEWS.d/next/Core and Builtins/2024-07-30-11-41-35.gh-issue-122445.Rq0bjS.rst @@ -1 +1 @@ -Add only fields which are modified via self.* to :attr:`~class.__static_attributes__`. +Add only fields which are modified via self.* to :attr:`~type.__static_attributes__`. diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-09-23-23-06-19.gh-issue-124285.mahGTg.rst b/Misc/NEWS.d/next/Core and Builtins/2024-09-23-23-06-19.gh-issue-124285.mahGTg.rst new file mode 100644 index 00000000000000..a6dec66a743f92 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2024-09-23-23-06-19.gh-issue-124285.mahGTg.rst @@ -0,0 +1,2 @@ +Fix bug where ``bool(a)`` can be invoked more than once during the +evaluation of a compound boolean expression. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-08-27-21-44-14.gh-issue-116017.ZY3yBY.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-08-27-21-44-14.gh-issue-116017.ZY3yBY.rst new file mode 100644 index 00000000000000..de62875e16475d --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-08-27-21-44-14.gh-issue-116017.ZY3yBY.rst @@ -0,0 +1,2 @@ +Improved JIT memory consumption by periodically freeing memory used by infrequently-executed code. +This change is especially likely to improve the memory footprint of long-running programs. 
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-09-02-20-36-45.gh-issue-123339.QcmpSs.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-09-02-20-36-45.gh-issue-123339.QcmpSs.rst new file mode 100644 index 00000000000000..25b47d5fbaefa5 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-09-02-20-36-45.gh-issue-123339.QcmpSs.rst @@ -0,0 +1,3 @@ +Setting the :attr:`!__module__` attribute for a class now removes the +``__firstlineno__`` item from the type's dict, so they will no longer be +inconsistent. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-09-23-15-23-14.gh-issue-123856.yrgJ9m.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-09-23-15-23-14.gh-issue-123856.yrgJ9m.rst new file mode 100644 index 00000000000000..b5f423f3ff1c96 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-09-23-15-23-14.gh-issue-123856.yrgJ9m.rst @@ -0,0 +1,2 @@ +Fix PyREPL failure when a keyboard interrupt is triggered after using a +history search diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-09-25-11-53-22.gh-issue-124442.EXC1Ve.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-09-25-11-53-22.gh-issue-124442.EXC1Ve.rst new file mode 100644 index 00000000000000..58e79f22ac0f90 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-09-25-11-53-22.gh-issue-124442.EXC1Ve.rst @@ -0,0 +1,2 @@ +Fix nondeterminism in compilation by sorting the value of +:attr:`~type.__static_attributes__`. Patch by kp2pml30. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-09-25-14-45-56.gh-issue-124513.ywiXtr.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-09-25-14-45-56.gh-issue-124513.ywiXtr.rst new file mode 100644 index 00000000000000..691e03b3b98e7a --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-09-25-14-45-56.gh-issue-124513.ywiXtr.rst @@ -0,0 +1,2 @@ +Fix a crash in FrameLocalsProxy constructor: check the number of arguments. +Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-09-26-12-19-13.gh-issue-124547.P_SHfU.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-09-26-12-19-13.gh-issue-124547.P_SHfU.rst new file mode 100644 index 00000000000000..1005c651849f45 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-09-26-12-19-13.gh-issue-124547.P_SHfU.rst @@ -0,0 +1,3 @@ +When deallocating an object with inline values whose ``__dict__`` is still +live: if memory allocation for the inline values fails, clear the +dictionary. Prevents an interpreter crash. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-09-26-13-25-01.gh-issue-119180.k_JCX0.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-09-26-13-25-01.gh-issue-119180.k_JCX0.rst new file mode 100644 index 00000000000000..4cdbb205c962c4 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-09-26-13-25-01.gh-issue-119180.k_JCX0.rst @@ -0,0 +1,2 @@ +The ``__main__`` module no longer always contains an ``__annotations__`` +dictionary in its global namespace. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-09-26-17-55-34.gh-issue-116510.dhn8w8.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-09-26-17-55-34.gh-issue-116510.dhn8w8.rst new file mode 100644 index 00000000000000..fc3f8af72d87bf --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-09-26-17-55-34.gh-issue-116510.dhn8w8.rst @@ -0,0 +1,3 @@ +Fix a bug that can cause a crash when sub-interpreters use "basic" +single-phase extension modules. Shared objects could refer to PyGC_Head +nodes that had been freed as part of interpreter cleanup. 
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-09-26-18-21-06.gh-issue-116510.FacUWO.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-09-26-18-21-06.gh-issue-116510.FacUWO.rst new file mode 100644 index 00000000000000..e3741321006548 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-09-26-18-21-06.gh-issue-116510.FacUWO.rst @@ -0,0 +1,5 @@ +Fix a crash caused by immortal interned strings being shared between +sub-interpreters that use basic single-phase init. In that case, the string +can be used by an interpreter that outlives the interpreter that created and +interned it. For interpreters that share obmalloc state, also share the +interned dict with the main interpreter. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-09-27-17-18-53.gh-issue-124642.OCjhBJ.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-09-27-17-18-53.gh-issue-124642.OCjhBJ.rst new file mode 100644 index 00000000000000..29763844a9f592 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-09-27-17-18-53.gh-issue-124642.OCjhBJ.rst @@ -0,0 +1 @@ +Fixed scalability issue in free-threaded builds for lock-free reads from dictionaries in multi-threaded scenarios diff --git a/Misc/NEWS.d/next/Documentation/2024-07-19-12-22-48.gh-issue-121277.wF_zKd.rst b/Misc/NEWS.d/next/Documentation/2024-07-19-12-22-48.gh-issue-121277.wF_zKd.rst new file mode 100644 index 00000000000000..60f75ae0c21326 --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2024-07-19-12-22-48.gh-issue-121277.wF_zKd.rst @@ -0,0 +1,2 @@ +Writers of CPython's documentation can now use ``next`` as the version for +the ``versionchanged``, ``versionadded``, ``deprecated`` directives. diff --git a/Misc/NEWS.d/next/Documentation/2024-09-24-11-52-36.gh-issue-124457.yrCjSV.rst b/Misc/NEWS.d/next/Documentation/2024-09-24-11-52-36.gh-issue-124457.yrCjSV.rst new file mode 100644 index 00000000000000..f9da7b8a5724f5 --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2024-09-24-11-52-36.gh-issue-124457.yrCjSV.rst @@ -0,0 +1,2 @@ +Remove coverity scan from the CPython repo. It has not been used since 2020 +and is currently unmaintained. diff --git a/Misc/NEWS.d/next/Documentation/2024-09-27-16-47-48.gh-issue-124720.nVSTVb.rst b/Misc/NEWS.d/next/Documentation/2024-09-27-16-47-48.gh-issue-124720.nVSTVb.rst new file mode 100644 index 00000000000000..6bef1e4158400b --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2024-09-27-16-47-48.gh-issue-124720.nVSTVb.rst @@ -0,0 +1,2 @@ +Update "Using Python on a Mac" section of the "Python Setup and Usage" +document and include information on installing free-threading support. diff --git a/Misc/NEWS.d/next/Library/2021-08-24-19-37-46.bpo-44864.KzxaDh.rst b/Misc/NEWS.d/next/Library/2021-08-24-19-37-46.bpo-44864.KzxaDh.rst new file mode 100644 index 00000000000000..9610fa90ef0a98 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2021-08-24-19-37-46.bpo-44864.KzxaDh.rst @@ -0,0 +1 @@ +Do not translate user-provided strings in :class:`argparse.ArgumentParser`. diff --git a/Misc/NEWS.d/next/Library/2023-06-16-14-52-00.gh-issue-102450.MfeR6A.rst b/Misc/NEWS.d/next/Library/2023-06-16-14-52-00.gh-issue-102450.MfeR6A.rst new file mode 100644 index 00000000000000..abfad5fa63b777 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-06-16-14-52-00.gh-issue-102450.MfeR6A.rst @@ -0,0 +1,2 @@ +Add missing ISO-8601 24:00 alternative to midnight of next day to :meth:`datetime.datetime.fromisoformat` and :meth:`datetime.time.fromisoformat`. 
+Patch by Izan "TizzySaurus" Robinson (tizzysaurus@gmail.com) diff --git a/Misc/NEWS.d/next/Library/2024-01-14-11-43-31.gh-issue-113878.dmEIN3.rst b/Misc/NEWS.d/next/Library/2024-01-14-11-43-31.gh-issue-113878.dmEIN3.rst new file mode 100644 index 00000000000000..8e1937ab73c31b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-01-14-11-43-31.gh-issue-113878.dmEIN3.rst @@ -0,0 +1,9 @@ +Add a *doc* parameter to :func:`dataclasses.field`, so it can be stored and +shown as documentation / metadata. If ``@dataclass(slots=True)`` is used, +then the supplied string is available in the :attr:`~object.__slots__` dict. +Otherwise, the supplied string is only available in the corresponding +:class:`dataclasses.Field` object. + +In order to support this feature, we are changing the ``__slots__`` format +in dataclasses from :class:`tuple` to :class:`dict` +when documentation / metadata is present. diff --git a/Misc/NEWS.d/next/Library/2024-05-25-00-54-26.gh-issue-119127.LpPvag.rst b/Misc/NEWS.d/next/Library/2024-05-25-00-54-26.gh-issue-119127.LpPvag.rst new file mode 100644 index 00000000000000..e47e2ae89dbff0 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-05-25-00-54-26.gh-issue-119127.LpPvag.rst @@ -0,0 +1,2 @@ +Positional arguments of :func:`functools.partial` objects +now support placeholders via :data:`functools.Placeholder`. diff --git a/Misc/NEWS.d/next/Library/2024-06-08-03-29-01.gh-issue-120254.h682ke.rst b/Misc/NEWS.d/next/Library/2024-06-08-03-29-01.gh-issue-120254.h682ke.rst new file mode 100644 index 00000000000000..33ef1c91591c54 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-06-08-03-29-01.gh-issue-120254.h682ke.rst @@ -0,0 +1 @@ +Added a ``commands`` argument to :func:`pdb.set_trace`, which allows users to send debugger commands from the source file. diff --git a/Misc/NEWS.d/next/Library/2024-06-15-23-38-36.gh-issue-120284.HwsAtY.rst b/Misc/NEWS.d/next/Library/2024-06-15-23-38-36.gh-issue-120284.HwsAtY.rst new file mode 100644 index 00000000000000..a2a6883c3d7686 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-06-15-23-38-36.gh-issue-120284.HwsAtY.rst @@ -0,0 +1,2 @@ +Allow :meth:`asyncio.Runner.run` to accept :term:`awaitable` +objects instead of simply :term:`coroutine`\s. diff --git a/Misc/NEWS.d/next/Library/2024-06-19-19-53-42.gh-issue-41431.gnkUc5.rst b/Misc/NEWS.d/next/Library/2024-06-19-19-53-42.gh-issue-41431.gnkUc5.rst new file mode 100644 index 00000000000000..18e3506a60c455 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-06-19-19-53-42.gh-issue-41431.gnkUc5.rst @@ -0,0 +1,2 @@ +Add :meth:`datetime.time.strptime` and :meth:`datetime.date.strptime`. +Contributed by Wannes Boeykens. diff --git a/Misc/NEWS.d/next/Library/2024-09-02-20-34-04.gh-issue-123339.czgcSu.rst b/Misc/NEWS.d/next/Library/2024-09-02-20-34-04.gh-issue-123339.czgcSu.rst new file mode 100644 index 00000000000000..e388541f1c2c19 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-09-02-20-34-04.gh-issue-123339.czgcSu.rst @@ -0,0 +1,4 @@ +Fix :func:`inspect.getsource` for classes in the :mod:`collections.abc` and +:mod:`decimal` (pure Python implementation) modules. +:func:`inspect.getcomments` now raises OSError instead of IndexError if the +``__firstlineno__`` value for a class is out of bounds.
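For the gh-119127 entry above, a short sketch of how :data:`functools.Placeholder` reserves a positional slot in a ``partial`` (the ``send`` function is hypothetical)::

    from functools import Placeholder, partial

    def send(host, port, message):
        return f"{host}:{port} <- {message}"

    # Reserve the first positional argument; bind the rest now.
    to_port_8080 = partial(send, Placeholder, 8080, message="ping")
    # The call-time positional argument fills the Placeholder slot.
    print(to_port_8080("127.0.0.1"))   # 127.0.0.1:8080 <- ping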
diff --git a/Misc/NEWS.d/next/Library/2024-09-06-01-35-11.gh-issue-123756.Ozbhke.rst b/Misc/NEWS.d/next/Library/2024-09-06-01-35-11.gh-issue-123756.Ozbhke.rst new file mode 100644 index 00000000000000..258dd591fce767 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-09-06-01-35-11.gh-issue-123756.Ozbhke.rst @@ -0,0 +1 @@ +Added a new argument ``mode`` to :class:`pdb.Pdb`. The ``restart`` command is now only allowed when :mod:`pdb` is invoked from the command line. diff --git a/Misc/NEWS.d/next/Library/2024-09-19-00-09-48.gh-issue-84559.IrxvQe.rst b/Misc/NEWS.d/next/Library/2024-09-19-00-09-48.gh-issue-84559.IrxvQe.rst new file mode 100644 index 00000000000000..a4428e20f3ccdd --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-09-19-00-09-48.gh-issue-84559.IrxvQe.rst @@ -0,0 +1,5 @@ +The default :mod:`multiprocessing` start method on Linux and other POSIX +systems has been changed away from the often unsafe ``"fork"`` to ``"forkserver"`` +(when the platform supports sending file handles over pipes, as most do) or +``"spawn"``. Mac and Windows are unchanged, as they already default to +``"spawn"``. diff --git a/Misc/NEWS.d/next/Library/2024-09-23-17-33-47.gh-issue-104860.O86OSc.rst b/Misc/NEWS.d/next/Library/2024-09-23-17-33-47.gh-issue-104860.O86OSc.rst new file mode 100644 index 00000000000000..707c4d651cb5e6 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-09-23-17-33-47.gh-issue-104860.O86OSc.rst @@ -0,0 +1,2 @@ +Fix disallowing abbreviation of single-dash long options in :mod:`argparse` +with ``allow_abbrev=False``. diff --git a/Misc/NEWS.d/next/Library/2024-09-23-18-18-23.gh-issue-124309.iFcarA.rst b/Misc/NEWS.d/next/Library/2024-09-23-18-18-23.gh-issue-124309.iFcarA.rst new file mode 100644 index 00000000000000..89610fa44bf743 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-09-23-18-18-23.gh-issue-124309.iFcarA.rst @@ -0,0 +1 @@ +Fixed :exc:`AssertionError` when using :func:`!asyncio.staggered.staggered_race` with :attr:`asyncio.eager_task_factory`. diff --git a/Misc/NEWS.d/next/Library/2024-09-23-18-26-17.gh-issue-90562.Yj566G.rst b/Misc/NEWS.d/next/Library/2024-09-23-18-26-17.gh-issue-90562.Yj566G.rst new file mode 100644 index 00000000000000..7a389fefc6c54b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-09-23-18-26-17.gh-issue-90562.Yj566G.rst @@ -0,0 +1,3 @@ +Modify dataclasses to support zero-argument super() when ``slots=True`` is +specified. This works by modifying all references to ``__class__`` to point +to the newly created class. diff --git a/Misc/NEWS.d/next/Library/2024-09-24-00-01-24.gh-issue-124400.0XCgfe.rst b/Misc/NEWS.d/next/Library/2024-09-24-00-01-24.gh-issue-124400.0XCgfe.rst new file mode 100644 index 00000000000000..25ee01e3108bf8 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-09-24-00-01-24.gh-issue-124400.0XCgfe.rst @@ -0,0 +1 @@ +Fixed a :mod:`pdb` bug where ``until`` had no effect when it appeared in a ``commands`` sequence. Also avoid printing the frame information at a breakpoint whose command list contains a command that resumes execution. diff --git a/Misc/NEWS.d/next/Library/2024-09-24-12-34-48.gh-issue-124345.s3vKql.rst b/Misc/NEWS.d/next/Library/2024-09-24-12-34-48.gh-issue-124345.s3vKql.rst new file mode 100644 index 00000000000000..dff902d8c6139a --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-09-24-12-34-48.gh-issue-124345.s3vKql.rst @@ -0,0 +1,2 @@ +:mod:`argparse` now supports abbreviated single-dash long options separated +from their value by ``=``.
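A minimal sketch of the zero-argument ``super()`` behaviour described in the gh-90562 entry above; before this change the ``super()`` call typically raised :exc:`TypeError` because ``slots=True`` recreates the class object (the ``Base``/``Child`` classes are hypothetical)::

    from dataclasses import dataclass

    @dataclass
    class Base:
        x: int

        def describe(self):
            return f"Base({self.x})"

    @dataclass(slots=True)
    class Child(Base):
        y: int = 0

        def describe(self):
            # Zero-argument super() now resolves against the recreated class.
            return "child of " + super().describe()

    print(Child(1, 2).describe())   # child of Base(1)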
diff --git a/Misc/NEWS.d/next/Library/2024-09-24-13-32-16.gh-issue-124176.6hmOPz.rst b/Misc/NEWS.d/next/Library/2024-09-24-13-32-16.gh-issue-124176.6hmOPz.rst new file mode 100644 index 00000000000000..38c030668b6b42 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-09-24-13-32-16.gh-issue-124176.6hmOPz.rst @@ -0,0 +1,4 @@ +Add support for :func:`dataclasses.dataclass` in +:func:`unittest.mock.create_autospec`. Now ``create_autospec`` will check +for potential dataclasses and use the :func:`dataclasses.fields` function to +retrieve the spec information. diff --git a/Misc/NEWS.d/next/Library/2024-09-24-19-32-14.gh-issue-123014.zVcfkZ.rst b/Misc/NEWS.d/next/Library/2024-09-24-19-32-14.gh-issue-123014.zVcfkZ.rst new file mode 100644 index 00000000000000..53dbabd9480ddb --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-09-24-19-32-14.gh-issue-123014.zVcfkZ.rst @@ -0,0 +1,3 @@ +:func:`os.pidfd_open` and :func:`signal.pidfd_send_signal` are now +unavailable when building against Android API levels older than 31, since +the underlying system calls may cause a crash. diff --git a/Misc/NEWS.d/next/Library/2024-09-24-21-15-27.gh-issue-123017.dSAr2f.rst b/Misc/NEWS.d/next/Library/2024-09-24-21-15-27.gh-issue-123017.dSAr2f.rst new file mode 100644 index 00000000000000..45fe4786fa6563 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-09-24-21-15-27.gh-issue-123017.dSAr2f.rst @@ -0,0 +1,2 @@ +Due to unreliable results on some devices, :func:`time.strftime` no longer +accepts negative years on Android. diff --git a/Misc/NEWS.d/next/Library/2024-09-24-22-38-51.gh-issue-123884.iEPTK4.rst b/Misc/NEWS.d/next/Library/2024-09-24-22-38-51.gh-issue-123884.iEPTK4.rst new file mode 100644 index 00000000000000..55f1d4b41125c3 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-09-24-22-38-51.gh-issue-123884.iEPTK4.rst @@ -0,0 +1,4 @@ +Fixed a bug in :func:`itertools.tee` handling of other tee inputs (a tee in a tee). +The output now has the promised *n* independent new iterators. Formerly, +the first iterator was identical (not independent) to the input iterator. +This would sometimes give surprising results. diff --git a/Misc/NEWS.d/next/Library/2024-09-25-10-25-57.gh-issue-53834.uyIckw.rst b/Misc/NEWS.d/next/Library/2024-09-25-10-25-57.gh-issue-53834.uyIckw.rst new file mode 100644 index 00000000000000..20ba1534f5e99d --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-09-25-10-25-57.gh-issue-53834.uyIckw.rst @@ -0,0 +1,4 @@ +Fix support of arguments with :ref:`choices` in :mod:`argparse`. Positional +arguments with :ref:`nargs` equal to ``'?'`` or ``'*'`` no longer check +:ref:`default` against ``choices``. Optional arguments with ``nargs`` equal +to ``'?'`` no longer check :ref:`const` against ``choices``. diff --git a/Misc/NEWS.d/next/Library/2024-09-25-12-14-58.gh-issue-124498.Ozxs55.rst b/Misc/NEWS.d/next/Library/2024-09-25-12-14-58.gh-issue-124498.Ozxs55.rst new file mode 100644 index 00000000000000..4dbf4eb709733d --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-09-25-12-14-58.gh-issue-124498.Ozxs55.rst @@ -0,0 +1,2 @@ +Fix :class:`typing.TypeAliasType` so that it is not generic when ``type_params`` is +an empty tuple.
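A small sketch of the :func:`itertools.tee` behaviour described above, with invented values: re-teeing one of the outputs now yields genuinely new, independent iterators::

    from itertools import tee

    src = iter(range(3))
    a, b = tee(src)
    c, d = tee(a)              # a tee of a tee
    # With the fix, c is a fresh iterator rather than the same object as a,
    # so consuming one no longer silently advances the other.
    print(c is a)              # False
    print(list(c), list(d))    # [0, 1, 2] [0, 1, 2]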
diff --git a/Misc/NEWS.d/next/Library/2024-09-25-18-08-29.gh-issue-80259.kO5Tw7.rst b/Misc/NEWS.d/next/Library/2024-09-25-18-08-29.gh-issue-80259.kO5Tw7.rst new file mode 100644 index 00000000000000..bb451cdd9ae44c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-09-25-18-08-29.gh-issue-80259.kO5Tw7.rst @@ -0,0 +1,2 @@ +Fix :mod:`argparse` support of positional arguments with ``nargs='?'``, +``default=argparse.SUPPRESS`` and specified ``type``. diff --git a/Misc/NEWS.d/next/Library/2024-09-25-18-34-48.gh-issue-124538.nXZk4R.rst b/Misc/NEWS.d/next/Library/2024-09-25-18-34-48.gh-issue-124538.nXZk4R.rst new file mode 100644 index 00000000000000..33ae037ae56b0b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-09-25-18-34-48.gh-issue-124538.nXZk4R.rst @@ -0,0 +1 @@ +Fixed crash when using :func:`gc.get_referents` on a capsule object. diff --git a/Misc/NEWS.d/next/Library/2024-09-26-09-18-09.gh-issue-61181.dwjmch.rst b/Misc/NEWS.d/next/Library/2024-09-26-09-18-09.gh-issue-61181.dwjmch.rst new file mode 100644 index 00000000000000..801a5fdd4abd4f --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-09-26-09-18-09.gh-issue-61181.dwjmch.rst @@ -0,0 +1,2 @@ +Fix support of :ref:`choices` with a string value in :mod:`argparse`. Substrings +of the specified string are no longer considered valid values. diff --git a/Misc/NEWS.d/next/Library/2024-09-26-22-14-12.gh-issue-58573.hozbm9.rst b/Misc/NEWS.d/next/Library/2024-09-26-22-14-12.gh-issue-58573.hozbm9.rst new file mode 100644 index 00000000000000..37d64ee536ff49 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-09-26-22-14-12.gh-issue-58573.hozbm9.rst @@ -0,0 +1,2 @@ +Fix conflicts between abbreviated long options in the parent parser and +subparsers in :mod:`argparse`. diff --git a/Misc/NEWS.d/next/Library/2024-09-27-15-16-04.gh-issue-116850.dBkR0-.rst b/Misc/NEWS.d/next/Library/2024-09-27-15-16-04.gh-issue-116850.dBkR0-.rst new file mode 100644 index 00000000000000..62639a16c52aa0 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-09-27-15-16-04.gh-issue-116850.dBkR0-.rst @@ -0,0 +1,2 @@ +Fix :mod:`argparse` for namespaces whose dict is not directly writable (e.g. +classes). diff --git a/Misc/NEWS.d/next/Tests/2024-09-25-12-39-34.gh-issue-124378.Ywwgia.rst b/Misc/NEWS.d/next/Tests/2024-09-25-12-39-34.gh-issue-124378.Ywwgia.rst new file mode 100644 index 00000000000000..9ddcca0eb6036d --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2024-09-25-12-39-34.gh-issue-124378.Ywwgia.rst @@ -0,0 +1 @@ +Updated ``test_ttk`` to pass with Tcl/Tk 8.6.15. diff --git a/Misc/NEWS.d/next/Windows/2024-03-19-19-04-56.gh-issue-116145.srVT3d.rst b/Misc/NEWS.d/next/Windows/2024-03-19-19-04-56.gh-issue-116145.srVT3d.rst deleted file mode 100644 index 7f840b0556048a..00000000000000 --- a/Misc/NEWS.d/next/Windows/2024-03-19-19-04-56.gh-issue-116145.srVT3d.rst +++ /dev/null @@ -1 +0,0 @@ -Updated bundled Tcl/Tk to 8.6.14. diff --git a/Misc/NEWS.d/next/Windows/2024-09-24-19-04-56.gh-issue-124448.srVT3d.rst b/Misc/NEWS.d/next/Windows/2024-09-24-19-04-56.gh-issue-124448.srVT3d.rst new file mode 100644 index 00000000000000..ca9845a8daea9d --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2024-09-24-19-04-56.gh-issue-124448.srVT3d.rst @@ -0,0 +1 @@ +Updated bundled Tcl/Tk to 8.6.15.
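To illustrate the :ref:`choices`-with-a-string entry above (the parser and values are invented for the example), a string passed as ``choices`` is treated as an iterable of its characters, and multi-character substrings are now rejected::

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("grade", choices="abcdef")   # iterable of single characters

    print(parser.parse_args(["b"]).grade)            # 'b' is a valid choice
    # A multi-character substring such as "ab" used to slip through the naive
    # "value in choices" containment test; it is now reported as invalid.
    # parser.parse_args(["ab"])   # -> error: argument grade: invalid choice: 'ab'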
diff --git a/Misc/NEWS.d/next/Windows/2024-09-27-13-40-25.gh-issue-124609.WaKk8G.rst b/Misc/NEWS.d/next/Windows/2024-09-27-13-40-25.gh-issue-124609.WaKk8G.rst new file mode 100644 index 00000000000000..203868a8fee39c --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2024-09-27-13-40-25.gh-issue-124609.WaKk8G.rst @@ -0,0 +1 @@ +Fix ``_Py_ThreadId`` for Windows builds using MinGW. Patch by Tony Roberts. diff --git a/Misc/NEWS.d/next/Windows/2024-09-27-15-07-30.gh-issue-124487.7LrwHC.rst b/Misc/NEWS.d/next/Windows/2024-09-27-15-07-30.gh-issue-124487.7LrwHC.rst new file mode 100644 index 00000000000000..93fb68d28c702e --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2024-09-27-15-07-30.gh-issue-124487.7LrwHC.rst @@ -0,0 +1 @@ +Increases Windows required OS and API level to Windows 10. diff --git a/Misc/NEWS.d/next/macOS/2024-09-24-10-48-46.gh-issue-124448.bFMrS6.rst b/Misc/NEWS.d/next/macOS/2024-09-24-10-48-46.gh-issue-124448.bFMrS6.rst new file mode 100644 index 00000000000000..6d57aa1ee190d6 --- /dev/null +++ b/Misc/NEWS.d/next/macOS/2024-09-24-10-48-46.gh-issue-124448.bFMrS6.rst @@ -0,0 +1 @@ +Update bundled Tcl/Tk in macOS installer to 8.6.15. diff --git a/Misc/README b/Misc/README index 3dab768ba1a7a4..cbad9b72dc713c 100644 --- a/Misc/README +++ b/Misc/README @@ -17,7 +17,6 @@ python.man UNIX man page for the python interpreter python.pc.in Package configuration info template for pkg-config README The file you're reading now README.AIX Information about using Python on AIX -README.coverity Information about running Coverity's Prevent on Python README.valgrind Information for Valgrind users, see valgrind-python.supp SpecialBuilds.txt Describes extra symbols you can set for debug builds svnmap.txt Map of old SVN revs and branches to hg changeset ids, diff --git a/Misc/README.coverity b/Misc/README.coverity deleted file mode 100644 index f5e1bf6f28d245..00000000000000 --- a/Misc/README.coverity +++ /dev/null @@ -1,22 +0,0 @@ - -Coverity has a static analysis tool (Prevent) which is similar to Klocwork. -They run their tool on the Python source code (SVN head) on a daily basis. -The results are available at: - - http://scan.coverity.com/ - -About 20 people have access to the analysis reports. Other -people can be added by request. - -Prevent was first run on the Python 2.5 source code in March 2006. -There were originally about 100 defects reported. Some of these -were false positives. Over 70 issues were uncovered. - -Each warning has a unique id and comments that can be made on it. -When checking in changes due to a warning, the unique id -as reported by the tool was added to the SVN commit message. - -False positives were annotated so that the comments can -be reviewed and reversed if the analysis was incorrect. - -Contact python-dev@python.org for more information. diff --git a/Misc/coverity_model.c b/Misc/coverity_model.c deleted file mode 100644 index 90c72c7baa3f9e..00000000000000 --- a/Misc/coverity_model.c +++ /dev/null @@ -1,179 +0,0 @@ -/* Coverity Scan model - * - * This is a modeling file for Coverity Scan. Modeling helps to avoid false - * positives. - * - * - A model file can't import any header files. - * - Therefore only some built-in primitives like int, char and void are - * available but not wchar_t, NULL etc. - * - Modeling doesn't need full structs and typedefs. Rudimentary structs - * and similar types are sufficient. - * - An uninitialized local pointer is not an error. It signifies that the - * variable could be either NULL or have some data. 
- * - * Coverity Scan doesn't pick up modifications automatically. The model file - * must be uploaded by an admin in the analysis settings of - * http://scan.coverity.com/projects/200 - */ - -/* dummy definitions, in most cases struct fields aren't required. */ - -#define NULL (void *)0 -#define assert(op) /* empty */ -typedef int sdigit; -typedef long Py_ssize_t; -typedef unsigned short wchar_t; -typedef struct {} PyObject; -typedef struct {} grammar; -typedef struct {} DIR; -typedef struct {} RFILE; - -/* Python/pythonrun.c - * resource leak false positive */ - -void Py_FatalError(const char *msg) { - __coverity_panic__(); -} - -/* Objects/longobject.c - * NEGATIVE_RETURNS false positive */ - -static PyObject *get_small_int(sdigit ival) -{ - /* Never returns NULL */ - PyObject *p; - assert(p != NULL); - return p; -} - -PyObject *PyLong_FromLong(long ival) -{ - PyObject *p; - int maybe; - - if ((ival >= -5) && (ival < 257 + 5)) { - p = get_small_int(ival); - assert(p != NULL); - return p; - } - if (maybe) - return p; - else - return NULL; -} - -PyObject *PyLong_FromLongLong(long long ival) -{ - return PyLong_FromLong((long)ival); -} - -PyObject *PyLong_FromSsize_t(Py_ssize_t ival) -{ - return PyLong_FromLong((long)ival); -} - -/* tainted sinks - * - * Coverity considers argv, environ, read() data etc as tainted. - */ - -PyObject *PyErr_SetFromErrnoWithFilename(PyObject *exc, const char *filename) -{ - __coverity_tainted_data_sink__(filename); - return NULL; -} - -/* Python/fileutils.c */ -wchar_t *Py_DecodeLocale(const char* arg, size_t *size) -{ - wchar_t *w; - __coverity_tainted_data_sink__(arg); - __coverity_tainted_data_sink__(size); - return w; -} - -/* Python/marshal.c */ - -static Py_ssize_t r_string(char *s, Py_ssize_t n, RFILE *p) -{ - __coverity_tainted_string_argument__(s); - return 0; -} - -static long r_long(RFILE *p) -{ - long l; - unsigned char buffer[4]; - - r_string((char *)buffer, 4, p); - __coverity_tainted_string_sanitize_content__(buffer); - l = (long)buffer; - return l; -} - -/* Coverity doesn't understand that fdopendir() may take ownership of fd. */ - -DIR *fdopendir(int fd) -{ - DIR *d; - if (d) { - __coverity_close__(fd); - } - return d; -} - -/* Modules/_datetime.c - * - * Coverity thinks that the input values for these function come from a - * tainted source PyDateTime_DATE_GET_* macros use bit shifting. 
- */ -static PyObject * -build_struct_time(int y, int m, int d, int hh, int mm, int ss, int dstflag) -{ - PyObject *result; - - __coverity_tainted_data_sanitize__(y); - __coverity_tainted_data_sanitize__(m); - __coverity_tainted_data_sanitize__(d); - __coverity_tainted_data_sanitize__(hh); - __coverity_tainted_data_sanitize__(mm); - __coverity_tainted_data_sanitize__(ss); - __coverity_tainted_data_sanitize__(dstflag); - - return result; -} - -static int -ymd_to_ord(int year, int month, int day) -{ - int ord = 0; - - __coverity_tainted_data_sanitize__(year); - __coverity_tainted_data_sanitize__(month); - __coverity_tainted_data_sanitize__(day); - - return ord; -} - -static int -normalize_date(int *year, int *month, int *day) -{ - __coverity_tainted_data_sanitize__(*year); - __coverity_tainted_data_sanitize__(*month); - __coverity_tainted_data_sanitize__(*day); - - return 0; -} - -static int -weekday(int year, int month, int day) -{ - int w = 0; - - __coverity_tainted_data_sanitize__(year); - __coverity_tainted_data_sanitize__(month); - __coverity_tainted_data_sanitize__(day); - - return w; -} - diff --git a/Misc/externals.spdx.json b/Misc/externals.spdx.json index f7aea9e8f990ba..d54b1fbe251378 100644 --- a/Misc/externals.spdx.json +++ b/Misc/externals.spdx.json @@ -112,42 +112,42 @@ "checksums": [ { "algorithm": "SHA256", - "checksumValue": "ad7623a44e1b6e42df47ba8f16b2b0435ac605650b5054077c4355a30473074c" + "checksumValue": "4c23f0dd3efcbe6f3a22c503a68d147617bb30c4f5290f1eb3eaacf0b460440b" } ], - "downloadLocation": "https://github.com/python/cpython-source-deps/archive/refs/tags/tcl-core-8.6.14.0.tar.gz", + "downloadLocation": "https://github.com/python/cpython-source-deps/archive/refs/tags/tcl-core-8.6.15.0.tar.gz", "externalRefs": [ { "referenceCategory": "SECURITY", - "referenceLocator": "cpe:2.3:a:tcl_tk:tcl_tk:8.6.14.0:*:*:*:*:*:*:*", + "referenceLocator": "cpe:2.3:a:tcl_tk:tcl_tk:8.6.15.0:*:*:*:*:*:*:*", "referenceType": "cpe23Type" } ], "licenseConcluded": "NOASSERTION", "name": "tcl-core", "primaryPackagePurpose": "SOURCE", - "versionInfo": "8.6.14.0" + "versionInfo": "8.6.15.0" }, { "SPDXID": "SPDXRef-PACKAGE-tk", "checksums": [ { "algorithm": "SHA256", - "checksumValue": "e8d5cbe97952037962518b69aba85e324d80aa189054c163ab0ee764a448e802" + "checksumValue": "0ae56d39bca92865f338529557a1e56d110594184b6dc5a91339c5675751e264" } ], - "downloadLocation": "https://github.com/python/cpython-source-deps/archive/refs/tags/tk-8.6.14.0.tar.gz", + "downloadLocation": "https://github.com/python/cpython-source-deps/archive/refs/tags/tk-8.6.15.0.tar.gz", "externalRefs": [ { "referenceCategory": "SECURITY", - "referenceLocator": "cpe:2.3:a:tcl_tk:tcl_tk:8.6.14.0:*:*:*:*:*:*:*", + "referenceLocator": "cpe:2.3:a:tcl_tk:tcl_tk:8.6.15.0:*:*:*:*:*:*:*", "referenceType": "cpe23Type" } ], "licenseConcluded": "NOASSERTION", "name": "tk", "primaryPackagePurpose": "SOURCE", - "versionInfo": "8.6.14.0" + "versionInfo": "8.6.15.0" }, { "SPDXID": "SPDXRef-PACKAGE-xz", diff --git a/Modules/Setup.stdlib.in b/Modules/Setup.stdlib.in index 9aa398a80efa1b..52c0f883d383db 100644 --- a/Modules/Setup.stdlib.in +++ b/Modules/Setup.stdlib.in @@ -163,7 +163,7 @@ @MODULE__TESTBUFFER_TRUE@_testbuffer _testbuffer.c @MODULE__TESTINTERNALCAPI_TRUE@_testinternalcapi _testinternalcapi.c _testinternalcapi/test_lock.c _testinternalcapi/pytime.c _testinternalcapi/set.c _testinternalcapi/test_critical_sections.c @MODULE__TESTCAPI_TRUE@_testcapi _testcapimodule.c _testcapi/vectorcall.c _testcapi/heaptype.c _testcapi/abstract.c 
_testcapi/unicode.c _testcapi/dict.c _testcapi/set.c _testcapi/list.c _testcapi/tuple.c _testcapi/getargs.c _testcapi/datetime.c _testcapi/docstring.c _testcapi/mem.c _testcapi/watchers.c _testcapi/long.c _testcapi/float.c _testcapi/complex.c _testcapi/numbers.c _testcapi/structmember.c _testcapi/exceptions.c _testcapi/code.c _testcapi/buffer.c _testcapi/pyatomic.c _testcapi/run.c _testcapi/file.c _testcapi/codec.c _testcapi/immortal.c _testcapi/gc.c _testcapi/hash.c _testcapi/time.c _testcapi/bytes.c _testcapi/object.c _testcapi/monitoring.c _testcapi/config.c -@MODULE__TESTLIMITEDCAPI_TRUE@_testlimitedcapi _testlimitedcapi.c _testlimitedcapi/abstract.c _testlimitedcapi/bytearray.c _testlimitedcapi/bytes.c _testlimitedcapi/complex.c _testlimitedcapi/dict.c _testlimitedcapi/eval.c _testlimitedcapi/float.c _testlimitedcapi/heaptype_relative.c _testlimitedcapi/list.c _testlimitedcapi/long.c _testlimitedcapi/object.c _testlimitedcapi/pyos.c _testlimitedcapi/set.c _testlimitedcapi/sys.c _testlimitedcapi/tuple.c _testlimitedcapi/unicode.c _testlimitedcapi/vectorcall_limited.c +@MODULE__TESTLIMITEDCAPI_TRUE@_testlimitedcapi _testlimitedcapi.c _testlimitedcapi/abstract.c _testlimitedcapi/bytearray.c _testlimitedcapi/bytes.c _testlimitedcapi/codec.c _testlimitedcapi/complex.c _testlimitedcapi/dict.c _testlimitedcapi/eval.c _testlimitedcapi/float.c _testlimitedcapi/heaptype_relative.c _testlimitedcapi/list.c _testlimitedcapi/long.c _testlimitedcapi/object.c _testlimitedcapi/pyos.c _testlimitedcapi/set.c _testlimitedcapi/sys.c _testlimitedcapi/tuple.c _testlimitedcapi/unicode.c _testlimitedcapi/vectorcall_limited.c @MODULE__TESTCLINIC_TRUE@_testclinic _testclinic.c @MODULE__TESTCLINIC_LIMITED_TRUE@_testclinic_limited _testclinic_limited.c diff --git a/Modules/_codecsmodule.c b/Modules/_codecsmodule.c index 32373f0799bfeb..471b42badc8e8c 100644 --- a/Modules/_codecsmodule.c +++ b/Modules/_codecsmodule.c @@ -979,6 +979,30 @@ _codecs_register_error_impl(PyObject *module, const char *errors, Py_RETURN_NONE; } +/*[clinic input] +_codecs._unregister_error -> bool + errors: str + / + +Un-register the specified error handler for the error handling `errors'. + +Only custom error handlers can be un-registered. An exception is raised +if the error handling is a built-in one (e.g., 'strict'), or if an error +occurs. + +Otherwise, this returns True if a custom handler has been successfully +un-registered, and False if no custom handler for the specified error +handling exists. 
+ +[clinic start generated code]*/ + +static int +_codecs__unregister_error_impl(PyObject *module, const char *errors) +/*[clinic end generated code: output=28c22be667465503 input=a63ab9e9ce1686d4]*/ +{ + return _PyCodec_UnregisterError(errors); +} + /*[clinic input] _codecs.lookup_error name: str @@ -1044,6 +1068,7 @@ static PyMethodDef _codecs_functions[] = { _CODECS_CODE_PAGE_ENCODE_METHODDEF _CODECS_CODE_PAGE_DECODE_METHODDEF _CODECS_REGISTER_ERROR_METHODDEF + _CODECS__UNREGISTER_ERROR_METHODDEF _CODECS_LOOKUP_ERROR_METHODDEF {NULL, NULL} /* sentinel */ }; diff --git a/Modules/_collectionsmodule.c b/Modules/_collectionsmodule.c index fbfed59995c21e..aef04248c7e73c 100644 --- a/Modules/_collectionsmodule.c +++ b/Modules/_collectionsmodule.c @@ -2179,6 +2179,8 @@ typedef struct { PyObject *default_factory; } defdictobject; +static PyType_Spec defdict_spec; + PyDoc_STRVAR(defdict_missing_doc, "__missing__(key) # Called by __getitem__ for missing key; pseudo-code:\n\ if self.default_factory is None: raise KeyError((key,))\n\ @@ -2358,23 +2360,16 @@ defdict_or(PyObject* left, PyObject* right) { PyObject *self, *other; - // Find module state - PyTypeObject *tp = Py_TYPE(left); - PyObject *mod = PyType_GetModuleByDef(tp, &_collectionsmodule); - if (mod == NULL) { - PyErr_Clear(); - tp = Py_TYPE(right); - mod = PyType_GetModuleByDef(tp, &_collectionsmodule); + int ret = PyType_GetBaseByToken(Py_TYPE(left), &defdict_spec, NULL); + if (ret < 0) { + return NULL; } - assert(mod != NULL); - collections_state *state = get_module_state(mod); - - if (PyObject_TypeCheck(left, state->defdict_type)) { + if (ret) { self = left; other = right; } else { - assert(PyObject_TypeCheck(right, state->defdict_type)); + assert(PyType_GetBaseByToken(Py_TYPE(right), &defdict_spec, NULL) == 1); self = right; other = left; } @@ -2454,6 +2449,7 @@ passed to the dict constructor, including keyword arguments.\n\ #define DEFERRED_ADDRESS(ADDR) 0 static PyType_Slot defdict_slots[] = { + {Py_tp_token, Py_TP_USE_SPEC}, {Py_tp_dealloc, defdict_dealloc}, {Py_tp_repr, defdict_repr}, {Py_nb_or, defdict_or}, diff --git a/Modules/_cursesmodule.c b/Modules/_cursesmodule.c index c9ee5687c2b5d9..ece6b13c78851f 100644 --- a/Modules/_cursesmodule.c +++ b/Modules/_cursesmodule.c @@ -159,16 +159,32 @@ typedef chtype attr_t; /* No attr_t type is available */ #define _CURSES_PAIR_CONTENT_FUNC pair_content #endif /* _NCURSES_EXTENDED_COLOR_FUNCS */ +typedef struct _cursesmodule_state { + PyObject *error; // PyCursesError + PyTypeObject *window_type; // PyCursesWindow_Type +} _cursesmodule_state; + +// For now, we keep a global state variable to prepare for PEP 489. +static _cursesmodule_state curses_global_state; + +static inline _cursesmodule_state * +get_cursesmodule_state(PyObject *Py_UNUSED(module)) +{ + return &curses_global_state; +} + +static inline _cursesmodule_state * +get_cursesmodule_state_by_win(PyCursesWindowObject *Py_UNUSED(win)) +{ + return &curses_global_state; +} + /*[clinic input] module _curses class _curses.window "PyCursesWindowObject *" "&PyCursesWindow_Type" [clinic start generated code]*/ /*[clinic end generated code: output=da39a3ee5e6b4b0d input=43265c372c2887d6]*/ -/* Definition of exception curses.error */ - -static PyObject *PyCursesError; - /* Tells whether setupterm() has been called to initialise terminfo. 
*/ static int curses_setupterm_called = FALSE; @@ -180,53 +196,113 @@ static int curses_start_color_called = FALSE; static const char *curses_screen_encoding = NULL; -/* Utility Macros */ -#define PyCursesSetupTermCalled \ +/* Utility Checking Procedures */ + +/* + * Function to check that 'funcname' has been called by testing + * the 'called' boolean. If an error occurs, a PyCursesError is + * set and this returns 0. Otherwise, this returns 1. + * + * Since this function can be called in functions that do not + * have a direct access to the module's state, the exception + * type is directly taken from the global state for now. + */ +static inline int +_PyCursesCheckFunction(int called, const char *funcname) +{ + if (called == TRUE) { + return 1; + } + PyErr_Format(curses_global_state.error, "must call %s() first", funcname); + return 0; +} + +/* + * Function to check that 'funcname' has been called by testing + * the 'called'' boolean. If an error occurs, a PyCursesError is + * set and this returns 0. Otherwise this returns 1. + * + * The exception type is obtained from the 'module' state. + */ +static inline int +_PyCursesStatefulCheckFunction(PyObject *module, int called, const char *funcname) +{ + if (called == TRUE) { + return 1; + } + _cursesmodule_state *state = get_cursesmodule_state(module); + PyErr_Format(state->error, "must call %s() first", funcname); + return 0; +} + +#define PyCursesStatefulSetupTermCalled(MODULE) \ do { \ - if (curses_setupterm_called != TRUE) { \ - PyErr_SetString(PyCursesError, \ - "must call (at least) setupterm() first"); \ + if (!_PyCursesStatefulCheckFunction(MODULE, \ + curses_setupterm_called, \ + "setupterm")) \ + { \ return 0; \ } \ } while (0) -#define PyCursesInitialised \ - do { \ - if (curses_initscr_called != TRUE) { \ - PyErr_SetString(PyCursesError, \ - "must call initscr() first"); \ - return 0; \ - } \ +#define PyCursesStatefulInitialised(MODULE) \ + do { \ + if (!_PyCursesStatefulCheckFunction(MODULE, \ + curses_initscr_called, \ + "initscr")) \ + { \ + return 0; \ + } \ } while (0) -#define PyCursesInitialisedColor \ - do { \ - if (curses_start_color_called != TRUE) { \ - PyErr_SetString(PyCursesError, \ - "must call start_color() first"); \ - return 0; \ - } \ +#define PyCursesStatefulInitialisedColor(MODULE) \ + do { \ + if (!_PyCursesStatefulCheckFunction(MODULE, \ + curses_start_color_called, \ + "start_color")) \ + { \ + return 0; \ + } \ } while (0) /* Utility Functions */ +static inline void +_PyCursesSetError(_cursesmodule_state *state, const char *funcname) +{ + if (funcname == NULL) { + PyErr_SetString(state->error, catchall_ERR); + } + else { + PyErr_Format(state->error, "%s() returned ERR", funcname); + } +} + /* * Check the return code from a curses function and return None - * or raise an exception as appropriate. These are exported using the - * capsule API. + * or raise an exception as appropriate. 
*/ static PyObject * -PyCursesCheckERR(int code, const char *fname) +PyCursesCheckERR(PyObject *module, int code, const char *fname) { if (code != ERR) { Py_RETURN_NONE; } else { - if (fname == NULL) { - PyErr_SetString(PyCursesError, catchall_ERR); - } else { - PyErr_Format(PyCursesError, "%s() returned ERR", fname); - } + _cursesmodule_state *state = get_cursesmodule_state(module); + _PyCursesSetError(state, fname); + return NULL; + } +} + +static PyObject * +PyCursesCheckERR_ForWin(PyCursesWindowObject *win, int code, const char *fname) +{ + if (code != ERR) { + Py_RETURN_NONE; + } else { + _cursesmodule_state *state = get_cursesmodule_state_by_win(win); + _PyCursesSetError(state, fname); return NULL; } } @@ -555,20 +631,17 @@ class component_converter(CConverter): static int func_PyCursesSetupTermCalled(void) { - PyCursesSetupTermCalled; - return 1; + return _PyCursesCheckFunction(curses_setupterm_called, "setupterm"); } static int func_PyCursesInitialised(void) { - PyCursesInitialised; - return 1; + return _PyCursesCheckFunction(curses_initscr_called, "initscr"); } static int func_PyCursesInitialisedColor(void) { - PyCursesInitialisedColor; - return 1; + return _PyCursesCheckFunction(curses_start_color_called, "start_color"); } /***************************************************************************** @@ -590,7 +663,7 @@ PyTypeObject PyCursesWindow_Type; #define Window_NoArgNoReturnFunction(X) \ static PyObject *PyCursesWindow_ ## X \ (PyCursesWindowObject *self, PyObject *Py_UNUSED(ignored)) \ - { return PyCursesCheckERR(X(self->win), # X); } + { return PyCursesCheckERR_ForWin(self, X(self->win), # X); } #define Window_NoArgTrueFalseFunction(X) \ static PyObject * PyCursesWindow_ ## X \ @@ -625,7 +698,7 @@ PyTypeObject PyCursesWindow_Type; { \ TYPE arg1; \ if (!PyArg_ParseTuple(args,PARSESTR, &arg1)) return NULL; \ - return PyCursesCheckERR(X(self->win, arg1), # X); } + return PyCursesCheckERR_ForWin(self, X(self->win, arg1), # X); } #define Window_TwoArgNoReturnFunction(X, TYPE, PARSESTR) \ static PyObject * PyCursesWindow_ ## X \ @@ -633,7 +706,7 @@ PyTypeObject PyCursesWindow_Type; { \ TYPE arg1, arg2; \ if (!PyArg_ParseTuple(args,PARSESTR, &arg1, &arg2)) return NULL; \ - return PyCursesCheckERR(X(self->win, arg1, arg2), # X); } + return PyCursesCheckERR_ForWin(self, X(self->win, arg1, arg2), # X); } /* ------------- WINDOW routines --------------- */ @@ -807,7 +880,7 @@ _curses_window_addch_impl(PyCursesWindowObject *self, int group_left_1, else { return NULL; } - return PyCursesCheckERR(rtn, funcname); + return PyCursesCheckERR_ForWin(self, rtn, funcname); } /*[clinic input] @@ -887,7 +960,7 @@ _curses_window_addstr_impl(PyCursesWindowObject *self, int group_left_1, } if (use_attr) (void)wattrset(self->win,attr_old); - return PyCursesCheckERR(rtn, funcname); + return PyCursesCheckERR_ForWin(self, rtn, funcname); } /*[clinic input] @@ -970,7 +1043,7 @@ _curses_window_addnstr_impl(PyCursesWindowObject *self, int group_left_1, } if (use_attr) (void)wattrset(self->win,attr_old); - return PyCursesCheckERR(rtn, funcname); + return PyCursesCheckERR_ForWin(self, rtn, funcname); } /*[clinic input] @@ -994,7 +1067,7 @@ _curses_window_bkgd_impl(PyCursesWindowObject *self, PyObject *ch, long attr) if (!PyCurses_ConvertToChtype(self, ch, &bkgd)) return NULL; - return PyCursesCheckERR(wbkgd(self->win, bkgd | attr), "bkgd"); + return PyCursesCheckERR_ForWin(self, wbkgd(self->win, bkgd | attr), "bkgd"); } /*[clinic input] @@ -1010,7 +1083,7 @@ static PyObject * 
_curses_window_attroff_impl(PyCursesWindowObject *self, long attr) /*[clinic end generated code: output=8a2fcd4df682fc64 input=786beedf06a7befe]*/ { - return PyCursesCheckERR(wattroff(self->win, (attr_t)attr), "attroff"); + return PyCursesCheckERR_ForWin(self, wattroff(self->win, (attr_t)attr), "attroff"); } /*[clinic input] @@ -1026,7 +1099,7 @@ static PyObject * _curses_window_attron_impl(PyCursesWindowObject *self, long attr) /*[clinic end generated code: output=7afea43b237fa870 input=5a88fba7b1524f32]*/ { - return PyCursesCheckERR(wattron(self->win, (attr_t)attr), "attron"); + return PyCursesCheckERR_ForWin(self, wattron(self->win, (attr_t)attr), "attron"); } /*[clinic input] @@ -1042,7 +1115,7 @@ static PyObject * _curses_window_attrset_impl(PyCursesWindowObject *self, long attr) /*[clinic end generated code: output=84e379bff20c0433 input=42e400c0d0154ab5]*/ { - return PyCursesCheckERR(wattrset(self->win, (attr_t)attr), "attrset"); + return PyCursesCheckERR_ForWin(self, wattrset(self->win, (attr_t)attr), "attrset"); } /*[clinic input] @@ -1068,7 +1141,7 @@ _curses_window_bkgdset_impl(PyCursesWindowObject *self, PyObject *ch, return NULL; wbkgdset(self->win, bkgd | attr); - return PyCursesCheckERR(0, "bkgdset"); + return PyCursesCheckERR_ForWin(self, 0, "bkgdset"); } /*[clinic input] @@ -1268,7 +1341,7 @@ PyCursesWindow_ChgAt(PyCursesWindowObject *self, PyObject *args) rtn = wchgat(self->win,num,attr,color,NULL); touchline(self->win,y,1); } - return PyCursesCheckERR(rtn, "chgat"); + return PyCursesCheckERR_ForWin(self, rtn, "chgat"); } #endif @@ -1292,10 +1365,10 @@ _curses_window_delch_impl(PyCursesWindowObject *self, int group_right_1, /*[clinic end generated code: output=22e77bb9fa11b461 input=d2f79e630a4fc6d0]*/ { if (!group_right_1) { - return PyCursesCheckERR(wdelch(self->win), "wdelch"); + return PyCursesCheckERR_ForWin(self, wdelch(self->win), "wdelch"); } else { - return PyCursesCheckERR(py_mvwdelch(self->win, y, x), "mvwdelch"); + return PyCursesCheckERR_ForWin(self, py_mvwdelch(self->win, y, x), "mvwdelch"); } } @@ -1331,7 +1404,8 @@ _curses_window_derwin_impl(PyCursesWindowObject *self, int group_left_1, win = derwin(self->win,nlines,ncols,begin_y,begin_x); if (win == NULL) { - PyErr_SetString(PyCursesError, catchall_NULL); + _cursesmodule_state *state = get_cursesmodule_state_by_win(self); + PyErr_SetString(state->error, catchall_NULL); return NULL; } @@ -1363,13 +1437,15 @@ _curses_window_echochar_impl(PyCursesWindowObject *self, PyObject *ch, #ifdef py_is_pad if (py_is_pad(self->win)) { - return PyCursesCheckERR(pechochar(self->win, ch_ | (attr_t)attr), - "echochar"); + return PyCursesCheckERR_ForWin(self, + pechochar(self->win, ch_ | (attr_t)attr), + "echochar"); } else #endif - return PyCursesCheckERR(wechochar(self->win, ch_ | (attr_t)attr), - "echochar"); + return PyCursesCheckERR_ForWin(self, + wechochar(self->win, ch_ | (attr_t)attr), + "echochar"); } #ifdef NCURSES_MOUSE_VERSION @@ -1480,8 +1556,10 @@ _curses_window_getkey_impl(PyCursesWindowObject *self, int group_right_1, if (rtn == ERR) { /* getch() returns ERR in nodelay mode */ PyErr_CheckSignals(); - if (!PyErr_Occurred()) - PyErr_SetString(PyCursesError, "no input"); + if (!PyErr_Occurred()) { + _cursesmodule_state *state = get_cursesmodule_state_by_win(self); + PyErr_SetString(state->error, "no input"); + } return NULL; } else if (rtn <= 255) { #ifdef NCURSES_VERSION_MAJOR @@ -1539,7 +1617,8 @@ _curses_window_get_wch_impl(PyCursesWindowObject *self, int group_right_1, return NULL; /* get_wch() returns ERR 
in nodelay mode */ - PyErr_SetString(PyCursesError, "no input"); + _cursesmodule_state *state = get_cursesmodule_state_by_win(self); + PyErr_SetString(state->error, "no input"); return NULL; } if (ct == KEY_CODE_YES) @@ -1663,10 +1742,10 @@ _curses_window_hline_impl(PyCursesWindowObject *self, int group_left_1, return NULL; if (group_left_1) { if (wmove(self->win, y, x) == ERR) { - return PyCursesCheckERR(ERR, "wmove"); + return PyCursesCheckERR_ForWin(self, ERR, "wmove"); } } - return PyCursesCheckERR(whline(self->win, ch_ | (attr_t)attr, n), "hline"); + return PyCursesCheckERR_ForWin(self, whline(self->win, ch_ | (attr_t)attr, n), "hline"); } /*[clinic input] @@ -1713,7 +1792,7 @@ _curses_window_insch_impl(PyCursesWindowObject *self, int group_left_1, rtn = mvwinsch(self->win, y, x, ch_ | (attr_t)attr); } - return PyCursesCheckERR(rtn, "insch"); + return PyCursesCheckERR_ForWin(self, rtn, "insch"); } /*[clinic input] @@ -1890,7 +1969,7 @@ _curses_window_insstr_impl(PyCursesWindowObject *self, int group_left_1, } if (use_attr) (void)wattrset(self->win,attr_old); - return PyCursesCheckERR(rtn, funcname); + return PyCursesCheckERR_ForWin(self, rtn, funcname); } /*[clinic input] @@ -1975,7 +2054,7 @@ _curses_window_insnstr_impl(PyCursesWindowObject *self, int group_left_1, } if (use_attr) (void)wattrset(self->win,attr_old); - return PyCursesCheckERR(rtn, funcname); + return PyCursesCheckERR_ForWin(self, rtn, funcname); } /*[clinic input] @@ -2052,7 +2131,8 @@ _curses_window_noutrefresh_impl(PyCursesWindowObject *self) #ifdef py_is_pad if (py_is_pad(self->win)) { if (!group_right_1) { - PyErr_SetString(PyCursesError, + _cursesmodule_state *state = get_cursesmodule_state_by_win(self); + PyErr_SetString(state->error, "noutrefresh() called for a pad " "requires 6 arguments"); return NULL; @@ -2061,7 +2141,7 @@ _curses_window_noutrefresh_impl(PyCursesWindowObject *self) rtn = pnoutrefresh(self->win, pminrow, pmincol, sminrow, smincol, smaxrow, smaxcol); Py_END_ALLOW_THREADS - return PyCursesCheckERR(rtn, "pnoutrefresh"); + return PyCursesCheckERR_ForWin(self, rtn, "pnoutrefresh"); } if (group_right_1) { PyErr_SetString(PyExc_TypeError, @@ -2072,7 +2152,7 @@ _curses_window_noutrefresh_impl(PyCursesWindowObject *self) Py_BEGIN_ALLOW_THREADS rtn = wnoutrefresh(self->win); Py_END_ALLOW_THREADS - return PyCursesCheckERR(rtn, "wnoutrefresh"); + return PyCursesCheckERR_ForWin(self, rtn, "wnoutrefresh"); } /*[clinic input] @@ -2114,11 +2194,11 @@ _curses_window_overlay_impl(PyCursesWindowObject *self, if (group_right_1) { rtn = copywin(self->win, destwin->win, sminrow, smincol, dminrow, dmincol, dmaxrow, dmaxcol, TRUE); - return PyCursesCheckERR(rtn, "copywin"); + return PyCursesCheckERR_ForWin(self, rtn, "copywin"); } else { rtn = overlay(self->win, destwin->win); - return PyCursesCheckERR(rtn, "overlay"); + return PyCursesCheckERR_ForWin(self, rtn, "overlay"); } } @@ -2162,11 +2242,11 @@ _curses_window_overwrite_impl(PyCursesWindowObject *self, if (group_right_1) { rtn = copywin(self->win, destwin->win, sminrow, smincol, dminrow, dmincol, dmaxrow, dmaxcol, FALSE); - return PyCursesCheckERR(rtn, "copywin"); + return PyCursesCheckERR_ForWin(self, rtn, "copywin"); } else { rtn = overwrite(self->win, destwin->win); - return PyCursesCheckERR(rtn, "overwrite"); + return PyCursesCheckERR_ForWin(self, rtn, "overwrite"); } } @@ -2195,7 +2275,7 @@ _curses_window_putwin(PyCursesWindowObject *self, PyObject *file) return PyErr_SetFromErrno(PyExc_OSError); if (_Py_set_inheritable(fileno(fp), 0, NULL) < 0) goto 
exit; - res = PyCursesCheckERR(putwin(self->win, fp), "putwin"); + res = PyCursesCheckERR_ForWin(self, putwin(self->win, fp), "putwin"); if (res == NULL) goto exit; fseek(fp, 0, 0); @@ -2234,7 +2314,7 @@ static PyObject * _curses_window_redrawln_impl(PyCursesWindowObject *self, int beg, int num) /*[clinic end generated code: output=ea216e334f9ce1b4 input=152155e258a77a7a]*/ { - return PyCursesCheckERR(wredrawln(self->win,beg,num), "redrawln"); + return PyCursesCheckERR_ForWin(self, wredrawln(self->win,beg,num), "redrawln"); } /*[clinic input] @@ -2276,7 +2356,8 @@ _curses_window_refresh_impl(PyCursesWindowObject *self, int group_right_1, #ifdef py_is_pad if (py_is_pad(self->win)) { if (!group_right_1) { - PyErr_SetString(PyCursesError, + _cursesmodule_state *state = get_cursesmodule_state_by_win(self); + PyErr_SetString(state->error, "refresh() for a pad requires 6 arguments"); return NULL; } @@ -2284,7 +2365,7 @@ _curses_window_refresh_impl(PyCursesWindowObject *self, int group_right_1, rtn = prefresh(self->win, pminrow, pmincol, sminrow, smincol, smaxrow, smaxcol); Py_END_ALLOW_THREADS - return PyCursesCheckERR(rtn, "prefresh"); + return PyCursesCheckERR_ForWin(self, rtn, "prefresh"); } #endif if (group_right_1) { @@ -2295,7 +2376,7 @@ _curses_window_refresh_impl(PyCursesWindowObject *self, int group_right_1, Py_BEGIN_ALLOW_THREADS rtn = wrefresh(self->win); Py_END_ALLOW_THREADS - return PyCursesCheckERR(rtn, "prefresh"); + return PyCursesCheckERR_ForWin(self, rtn, "prefresh"); } /*[clinic input] @@ -2317,7 +2398,7 @@ _curses_window_setscrreg_impl(PyCursesWindowObject *self, int top, int bottom) /*[clinic end generated code: output=486ab5db218d2b1a input=1b517b986838bf0e]*/ { - return PyCursesCheckERR(wsetscrreg(self->win, top, bottom), "wsetscrreg"); + return PyCursesCheckERR_ForWin(self, wsetscrreg(self->win, top, bottom), "wsetscrreg"); } /*[clinic input] @@ -2358,7 +2439,8 @@ _curses_window_subwin_impl(PyCursesWindowObject *self, int group_left_1, win = subwin(self->win, nlines, ncols, begin_y, begin_x); if (win == NULL) { - PyErr_SetString(PyCursesError, catchall_NULL); + _cursesmodule_state *state = get_cursesmodule_state_by_win(self); + PyErr_SetString(state->error, catchall_NULL); return NULL; } @@ -2385,10 +2467,10 @@ _curses_window_scroll_impl(PyCursesWindowObject *self, int group_right_1, /*[clinic end generated code: output=4541a8a11852d360 input=c969ca0cfabbdbec]*/ { if (!group_right_1) { - return PyCursesCheckERR(scroll(self->win), "scroll"); + return PyCursesCheckERR_ForWin(self, scroll(self->win), "scroll"); } else { - return PyCursesCheckERR(wscrl(self->win, lines), "scroll"); + return PyCursesCheckERR_ForWin(self, wscrl(self->win, lines), "scroll"); } } @@ -2414,10 +2496,10 @@ _curses_window_touchline_impl(PyCursesWindowObject *self, int start, /*[clinic end generated code: output=65d05b3f7438c61d input=a98aa4f79b6be845]*/ { if (!group_right_1) { - return PyCursesCheckERR(touchline(self->win, start, count), "touchline"); + return PyCursesCheckERR_ForWin(self, touchline(self->win, start, count), "touchline"); } else { - return PyCursesCheckERR(wtouchln(self->win, start, count, changed), "touchline"); + return PyCursesCheckERR_ForWin(self, wtouchln(self->win, start, count, changed), "touchline"); } } @@ -2457,9 +2539,9 @@ _curses_window_vline_impl(PyCursesWindowObject *self, int group_left_1, return NULL; if (group_left_1) { if (wmove(self->win, y, x) == ERR) - return PyCursesCheckERR(ERR, "wmove"); + return PyCursesCheckERR_ForWin(self, ERR, "wmove"); } - return 
PyCursesCheckERR(wvline(self->win, ch_ | (attr_t)attr, n), "vline"); + return PyCursesCheckERR_ForWin(self, wvline(self->win, ch_ | (attr_t)attr, n), "vline"); } static PyObject * @@ -2632,7 +2714,7 @@ PyTypeObject PyCursesWindow_Type = { PyCursesWindow_getsets, /* tp_getset */ }; -/* Function Prototype Macros - They are ugly but very, very useful. ;-) +/* Function Body Macros - They are ugly but very, very useful. ;-) X - function name TYPE - parameter Type @@ -2642,37 +2724,37 @@ PyTypeObject PyCursesWindow_Type = { #define NoArgNoReturnFunctionBody(X) \ { \ - PyCursesInitialised; \ - return PyCursesCheckERR(X(), # X); } + PyCursesStatefulInitialised(module); \ + return PyCursesCheckERR(module, X(), # X); } #define NoArgOrFlagNoReturnFunctionBody(X, flag) \ { \ - PyCursesInitialised; \ + PyCursesStatefulInitialised(module); \ if (flag) \ - return PyCursesCheckERR(X(), # X); \ + return PyCursesCheckERR(module, X(), # X); \ else \ - return PyCursesCheckERR(no ## X(), # X); \ + return PyCursesCheckERR(module, no ## X(), # X); \ } #define NoArgReturnIntFunctionBody(X) \ { \ - PyCursesInitialised; \ + PyCursesStatefulInitialised(module); \ return PyLong_FromLong((long) X()); } #define NoArgReturnStringFunctionBody(X) \ { \ - PyCursesInitialised; \ + PyCursesStatefulInitialised(module); \ return PyBytes_FromString(X()); } #define NoArgTrueFalseFunctionBody(X) \ { \ - PyCursesInitialised; \ + PyCursesStatefulInitialised(module); \ return PyBool_FromLong(X()); } #define NoArgNoReturnVoidFunctionBody(X) \ { \ - PyCursesInitialised; \ + PyCursesStatefulInitialised(module); \ X(); \ Py_RETURN_NONE; } @@ -2770,12 +2852,13 @@ _curses_color_content_impl(PyObject *module, int color_number) { _CURSES_COLOR_VAL_TYPE r,g,b; - PyCursesInitialised; - PyCursesInitialisedColor; + PyCursesStatefulInitialised(module); + PyCursesStatefulInitialisedColor(module); if (_COLOR_CONTENT_FUNC(color_number, &r, &g, &b) == ERR) { - PyErr_Format(PyCursesError, "%s() returned ERR", - Py_STRINGIFY(_COLOR_CONTENT_FUNC)); + _cursesmodule_state *state = get_cursesmodule_state(module); + PyErr_Format(state->error, "%s() returned ERR", + Py_STRINGIFY(_COLOR_CONTENT_FUNC)); return NULL; } @@ -2799,8 +2882,8 @@ static PyObject * _curses_color_pair_impl(PyObject *module, int pair_number) /*[clinic end generated code: output=60718abb10ce9feb input=6034e9146f343802]*/ { - PyCursesInitialised; - PyCursesInitialisedColor; + PyCursesStatefulInitialised(module); + PyCursesStatefulInitialisedColor(module); return PyLong_FromLong(COLOR_PAIR(pair_number)); } @@ -2826,10 +2909,10 @@ _curses_curs_set_impl(PyObject *module, int visibility) { int erg; - PyCursesInitialised; + PyCursesStatefulInitialised(module); erg = curs_set(visibility); - if (erg == ERR) return PyCursesCheckERR(erg, "curs_set"); + if (erg == ERR) return PyCursesCheckERR(module, erg, "curs_set"); return PyLong_FromLong((long) erg); } @@ -2878,9 +2961,9 @@ static PyObject * _curses_delay_output_impl(PyObject *module, int ms) /*[clinic end generated code: output=b6613a67f17fa4f4 input=5316457f5f59196c]*/ { - PyCursesInitialised; + PyCursesStatefulInitialised(module); - return PyCursesCheckERR(delay_output(ms), "delay_output"); + return PyCursesCheckERR(module, delay_output(ms), "delay_output"); } /*[clinic input] @@ -2934,7 +3017,7 @@ _curses_erasechar_impl(PyObject *module) { char ch; - PyCursesInitialised; + PyCursesStatefulInitialised(module); ch = erasechar(); @@ -2984,7 +3067,7 @@ _curses_getsyx_impl(PyObject *module) int x = 0; int y = 0; - PyCursesInitialised; + 
PyCursesStatefulInitialised(module); getsyx(y, x); @@ -3009,11 +3092,12 @@ _curses_getmouse_impl(PyObject *module) int rtn; MEVENT event; - PyCursesInitialised; + PyCursesStatefulInitialised(module); rtn = getmouse( &event ); if (rtn == ERR) { - PyErr_SetString(PyCursesError, "getmouse() returned ERR"); + _cursesmodule_state *state = get_cursesmodule_state(module); + PyErr_SetString(state->error, "getmouse() returned ERR"); return NULL; } return Py_BuildValue("(hiiik)", @@ -3044,14 +3128,14 @@ _curses_ungetmouse_impl(PyObject *module, short id, int x, int y, int z, { MEVENT event; - PyCursesInitialised; + PyCursesStatefulInitialised(module); event.id = id; event.x = x; event.y = y; event.z = z; event.bstate = bstate; - return PyCursesCheckERR(ungetmouse(&event), "ungetmouse"); + return PyCursesCheckERR(module, ungetmouse(&event), "ungetmouse"); } #endif @@ -3077,7 +3161,7 @@ _curses_getwin(PyObject *module, PyObject *file) WINDOW *win; PyObject *res = NULL; - PyCursesInitialised; + PyCursesStatefulInitialised(module); fp = tmpfile(); if (fp == NULL) @@ -3107,7 +3191,8 @@ _curses_getwin(PyObject *module, PyObject *file) fseek(fp, 0, 0); win = getwin(fp); if (win == NULL) { - PyErr_SetString(PyCursesError, catchall_NULL); + _cursesmodule_state *state = get_cursesmodule_state(module); + PyErr_SetString(state->error, catchall_NULL); goto error; } res = PyCursesWindow_New(win, NULL); @@ -3133,9 +3218,9 @@ static PyObject * _curses_halfdelay_impl(PyObject *module, unsigned char tenths) /*[clinic end generated code: output=e92cdf0ef33c0663 input=e42dce7259c15100]*/ { - PyCursesInitialised; + PyCursesStatefulInitialised(module); - return PyCursesCheckERR(halfdelay(tenths), "halfdelay"); + return PyCursesCheckERR(module, halfdelay(tenths), "halfdelay"); } /*[clinic input] @@ -3186,7 +3271,7 @@ static PyObject * _curses_has_key_impl(PyObject *module, int key) /*[clinic end generated code: output=19ad48319414d0b1 input=78bd44acf1a4997c]*/ { - PyCursesInitialised; + PyCursesStatefulInitialised(module); return PyBool_FromLong(has_key(key)); } @@ -3217,10 +3302,11 @@ _curses_init_color_impl(PyObject *module, int color_number, short r, short g, short b) /*[clinic end generated code: output=d7ed71b2d818cdf2 input=ae2b8bea0f152c80]*/ { - PyCursesInitialised; - PyCursesInitialisedColor; + PyCursesStatefulInitialised(module); + PyCursesStatefulInitialisedColor(module); - return PyCursesCheckERR(_CURSES_INIT_COLOR_FUNC(color_number, r, g, b), + return PyCursesCheckERR(module, + _CURSES_INIT_COLOR_FUNC(color_number, r, g, b), Py_STRINGIFY(_CURSES_INIT_COLOR_FUNC)); } @@ -3245,8 +3331,8 @@ static PyObject * _curses_init_pair_impl(PyObject *module, int pair_number, int fg, int bg) /*[clinic end generated code: output=a0bba03d2bbc3ee6 input=54b421b44c12c389]*/ { - PyCursesInitialised; - PyCursesInitialisedColor; + PyCursesStatefulInitialised(module); + PyCursesStatefulInitialisedColor(module); if (_CURSES_INIT_PAIR_FUNC(pair_number, fg, bg) == ERR) { if (pair_number >= COLOR_PAIRS) { @@ -3255,7 +3341,8 @@ _curses_init_pair_impl(PyObject *module, int pair_number, int fg, int bg) COLOR_PAIRS - 1); } else { - PyErr_Format(PyCursesError, "%s() returned ERR", + _cursesmodule_state *state = get_cursesmodule_state(module); + PyErr_Format(state->error, "%s() returned ERR", Py_STRINGIFY(_CURSES_INIT_PAIR_FUNC)); } return NULL; @@ -3286,7 +3373,8 @@ _curses_initscr_impl(PyObject *module) win = initscr(); if (win == NULL) { - PyErr_SetString(PyCursesError, catchall_NULL); + _cursesmodule_state *state = 
get_cursesmodule_state(module); + PyErr_SetString(state->error, catchall_NULL); return NULL; } @@ -3415,9 +3503,8 @@ _curses_setupterm_impl(PyObject *module, const char *term, int fd) sys_stdout = PySys_GetObject("stdout"); if (sys_stdout == NULL || sys_stdout == Py_None) { - PyErr_SetString( - PyCursesError, - "lost sys.stdout"); + _cursesmodule_state *state = get_cursesmodule_state(module); + PyErr_SetString(state->error, "lost sys.stdout"); return NULL; } @@ -3437,7 +3524,8 @@ _curses_setupterm_impl(PyObject *module, const char *term, int fd) s = "setupterm: could not find terminfo database"; } - PyErr_SetString(PyCursesError,s); + _cursesmodule_state *state = get_cursesmodule_state(module); + PyErr_SetString(state->error, s); return NULL; } @@ -3487,7 +3575,7 @@ _curses_set_escdelay_impl(PyObject *module, int ms) return NULL; } - return PyCursesCheckERR(set_escdelay(ms), "set_escdelay"); + return PyCursesCheckERR(module, set_escdelay(ms), "set_escdelay"); } /*[clinic input] @@ -3526,7 +3614,7 @@ _curses_set_tabsize_impl(PyObject *module, int size) return NULL; } - return PyCursesCheckERR(set_tabsize(size), "set_tabsize"); + return PyCursesCheckERR(module, set_tabsize(size), "set_tabsize"); } #endif @@ -3542,9 +3630,9 @@ static PyObject * _curses_intrflush_impl(PyObject *module, int flag) /*[clinic end generated code: output=c1986df35e999a0f input=c65fe2ef973fe40a]*/ { - PyCursesInitialised; + PyCursesStatefulInitialised(module); - return PyCursesCheckERR(intrflush(NULL, flag), "intrflush"); + return PyCursesCheckERR(module, intrflush(NULL, flag), "intrflush"); } /*[clinic input] @@ -3575,7 +3663,7 @@ static PyObject * _curses_is_term_resized_impl(PyObject *module, int nlines, int ncols) /*[clinic end generated code: output=aafe04afe50f1288 input=ca9c0bd0fb8ab444]*/ { - PyCursesInitialised; + PyCursesStatefulInitialised(module); return PyBool_FromLong(is_term_resized(nlines, ncols)); } @@ -3597,7 +3685,7 @@ _curses_keyname_impl(PyObject *module, int key) { const char *knp; - PyCursesInitialised; + PyCursesStatefulInitialised(module); if (key < 0) { PyErr_SetString(PyExc_ValueError, "invalid key number"); @@ -3655,9 +3743,9 @@ static PyObject * _curses_meta_impl(PyObject *module, int yes) /*[clinic end generated code: output=22f5abda46a605d8 input=cfe7da79f51d0e30]*/ { - PyCursesInitialised; + PyCursesStatefulInitialised(module); - return PyCursesCheckERR(meta(stdscr, yes), "meta"); + return PyCursesCheckERR(module, meta(stdscr, yes), "meta"); } #ifdef NCURSES_MOUSE_VERSION @@ -3679,9 +3767,9 @@ static PyObject * _curses_mouseinterval_impl(PyObject *module, int interval) /*[clinic end generated code: output=c4f5ff04354634c5 input=75aaa3f0db10ac4e]*/ { - PyCursesInitialised; + PyCursesStatefulInitialised(module); - return PyCursesCheckERR(mouseinterval(interval), "mouseinterval"); + return PyCursesCheckERR(module, mouseinterval(interval), "mouseinterval"); } /*[clinic input] @@ -3704,7 +3792,7 @@ _curses_mousemask_impl(PyObject *module, unsigned long newmask) { mmask_t oldmask, availmask; - PyCursesInitialised; + PyCursesStatefulInitialised(module); availmask = mousemask((mmask_t)newmask, &oldmask); return Py_BuildValue("(kk)", (unsigned long)availmask, (unsigned long)oldmask); @@ -3725,7 +3813,7 @@ static int _curses_napms_impl(PyObject *module, int ms) /*[clinic end generated code: output=5f292a6a724491bd input=c6d6e01f2f1df9f7]*/ { - PyCursesInitialised; + PyCursesStatefulInitialised(module); return napms(ms); } @@ -3749,12 +3837,13 @@ _curses_newpad_impl(PyObject *module, int nlines, 
int ncols) { WINDOW *win; - PyCursesInitialised; + PyCursesStatefulInitialised(module); win = newpad(nlines, ncols); if (win == NULL) { - PyErr_SetString(PyCursesError, catchall_NULL); + _cursesmodule_state *state = get_cursesmodule_state(module); + PyErr_SetString(state->error, catchall_NULL); return NULL; } @@ -3789,11 +3878,12 @@ _curses_newwin_impl(PyObject *module, int nlines, int ncols, { WINDOW *win; - PyCursesInitialised; + PyCursesStatefulInitialised(module); win = newwin(nlines,ncols,begin_y,begin_x); if (win == NULL) { - PyErr_SetString(PyCursesError, catchall_NULL); + _cursesmodule_state *state = get_cursesmodule_state(module); + PyErr_SetString(state->error, catchall_NULL); return NULL; } @@ -3901,8 +3991,8 @@ _curses_pair_content_impl(PyObject *module, int pair_number) { _CURSES_COLOR_NUM_TYPE f, b; - PyCursesInitialised; - PyCursesInitialisedColor; + PyCursesStatefulInitialised(module); + PyCursesStatefulInitialisedColor(module); if (_CURSES_PAIR_CONTENT_FUNC(pair_number, &f, &b) == ERR) { if (pair_number >= COLOR_PAIRS) { @@ -3911,7 +4001,8 @@ _curses_pair_content_impl(PyObject *module, int pair_number) COLOR_PAIRS - 1); } else { - PyErr_Format(PyCursesError, "%s() returned ERR", + _cursesmodule_state *state = get_cursesmodule_state(module); + PyErr_Format(state->error, "%s() returned ERR", Py_STRINGIFY(_CURSES_PAIR_CONTENT_FUNC)); } return NULL; @@ -3935,8 +4026,8 @@ static PyObject * _curses_pair_number_impl(PyObject *module, int attr) /*[clinic end generated code: output=85bce7d65c0aa3f4 input=d478548e33f5e61a]*/ { - PyCursesInitialised; - PyCursesInitialisedColor; + PyCursesStatefulInitialised(module); + PyCursesStatefulInitialisedColor(module); return PyLong_FromLong(PAIR_NUMBER(attr)); } @@ -3956,7 +4047,7 @@ static PyObject * _curses_putp_impl(PyObject *module, const char *string) /*[clinic end generated code: output=e98081d1b8eb5816 input=1601faa828b44cb3]*/ { - return PyCursesCheckERR(putp(string), "putp"); + return PyCursesCheckERR(module, putp(string), "putp"); } /*[clinic input] @@ -3976,7 +4067,7 @@ static PyObject * _curses_qiflush_impl(PyObject *module, int flag) /*[clinic end generated code: output=9167e862f760ea30 input=6ec8b3e2b717ec40]*/ { - PyCursesInitialised; + PyCursesStatefulInitialised(module); if (flag) { qiflush(); @@ -4036,7 +4127,7 @@ update_lines_cols(PyObject *private_module) error: Py_XDECREF(o); - Py_DECREF(exposed_module); + Py_XDECREF(exposed_module); return 0; } @@ -4131,9 +4222,9 @@ _curses_resizeterm_impl(PyObject *module, int nlines, int ncols) { PyObject *result; - PyCursesInitialised; + PyCursesStatefulInitialised(module); - result = PyCursesCheckERR(resizeterm(nlines, ncols), "resizeterm"); + result = PyCursesCheckERR(module, resizeterm(nlines, ncols), "resizeterm"); if (!result) return NULL; if (!update_lines_cols(module)) { @@ -4170,9 +4261,9 @@ _curses_resize_term_impl(PyObject *module, int nlines, int ncols) { PyObject *result; - PyCursesInitialised; + PyCursesStatefulInitialised(module); - result = PyCursesCheckERR(resize_term(nlines, ncols), "resize_term"); + result = PyCursesCheckERR(module, resize_term(nlines, ncols), "resize_term"); if (!result) return NULL; if (!update_lines_cols(module)) { @@ -4213,7 +4304,7 @@ static PyObject * _curses_setsyx_impl(PyObject *module, int y, int x) /*[clinic end generated code: output=23dcf753511a2464 input=fa7f2b208e10a557]*/ { - PyCursesInitialised; + PyCursesStatefulInitialised(module); setsyx(y,x); @@ -4238,10 +4329,11 @@ static PyObject * _curses_start_color_impl(PyObject *module) 
/*[clinic end generated code: output=8b772b41d8090ede input=0ca0ecb2b77e1a12]*/ { - PyCursesInitialised; + PyCursesStatefulInitialised(module); if (start_color() == ERR) { - PyErr_SetString(PyCursesError, "start_color() returned ERR"); + _cursesmodule_state *state = get_cursesmodule_state(module); + PyErr_SetString(state->error, "start_color() returned ERR"); return NULL; } @@ -4310,7 +4402,7 @@ static PyObject * _curses_tigetflag_impl(PyObject *module, const char *capname) /*[clinic end generated code: output=8853c0e55542195b input=b0787af9e3e9a6ce]*/ { - PyCursesSetupTermCalled; + PyCursesStatefulSetupTermCalled(module); return PyLong_FromLong( (long) tigetflag( (char *)capname ) ); } @@ -4332,7 +4424,7 @@ static PyObject * _curses_tigetnum_impl(PyObject *module, const char *capname) /*[clinic end generated code: output=46f8b0a1b5dff42f input=5cdf2f410b109720]*/ { - PyCursesSetupTermCalled; + PyCursesStatefulSetupTermCalled(module); return PyLong_FromLong( (long) tigetnum( (char *)capname ) ); } @@ -4354,7 +4446,7 @@ static PyObject * _curses_tigetstr_impl(PyObject *module, const char *capname) /*[clinic end generated code: output=f22b576ad60248f3 input=36644df25c73c0a7]*/ { - PyCursesSetupTermCalled; + PyCursesStatefulSetupTermCalled(module); capname = tigetstr( (char *)capname ); if (capname == NULL || capname == (char*) -1) { @@ -4389,11 +4481,12 @@ _curses_tparm_impl(PyObject *module, const char *str, int i1, int i2, int i3, { char* result = NULL; - PyCursesSetupTermCalled; + PyCursesStatefulSetupTermCalled(module); result = tparm((char *)str,i1,i2,i3,i4,i5,i6,i7,i8,i9); if (!result) { - PyErr_SetString(PyCursesError, "tparm() returned NULL"); + _cursesmodule_state *state = get_cursesmodule_state(module); + PyErr_SetString(state->error, "tparm() returned NULL"); return NULL; } @@ -4417,9 +4510,9 @@ static PyObject * _curses_typeahead_impl(PyObject *module, int fd) /*[clinic end generated code: output=084bb649d7066583 input=f2968d8e1805051b]*/ { - PyCursesInitialised; + PyCursesStatefulInitialised(module); - return PyCursesCheckERR(typeahead( fd ), "typeahead"); + return PyCursesCheckERR(module, typeahead( fd ), "typeahead"); } #endif @@ -4441,7 +4534,7 @@ _curses_unctrl(PyObject *module, PyObject *ch) { chtype ch_; - PyCursesInitialised; + PyCursesStatefulInitialised(module); if (!PyCurses_ConvertToChtype(NULL, ch, &ch_)) return NULL; @@ -4464,12 +4557,12 @@ _curses_ungetch(PyObject *module, PyObject *ch) { chtype ch_; - PyCursesInitialised; + PyCursesStatefulInitialised(module); if (!PyCurses_ConvertToChtype(NULL, ch, &ch_)) return NULL; - return PyCursesCheckERR(ungetch(ch_), "ungetch"); + return PyCursesCheckERR(module, ungetch(ch_), "ungetch"); } #ifdef HAVE_NCURSESW @@ -4535,11 +4628,11 @@ _curses_unget_wch(PyObject *module, PyObject *ch) { wchar_t wch; - PyCursesInitialised; + PyCursesStatefulInitialised(module); if (!PyCurses_ConvertToWchar_t(ch, &wch)) return NULL; - return PyCursesCheckERR(unget_wch(wch), "unget_wch"); + return PyCursesCheckERR(module, unget_wch(wch), "unget_wch"); } #endif @@ -4587,14 +4680,15 @@ _curses_use_default_colors_impl(PyObject *module) { int code; - PyCursesInitialised; - PyCursesInitialisedColor; + PyCursesStatefulInitialised(module); + PyCursesStatefulInitialisedColor(module); code = use_default_colors(); if (code != ERR) { Py_RETURN_NONE; } else { - PyErr_SetString(PyCursesError, "use_default_colors() returned ERR"); + _cursesmodule_state *state = get_cursesmodule_state(module); + PyErr_SetString(state->error, "use_default_colors() returned 
ERR"); return NULL; } } @@ -4785,6 +4879,7 @@ curses_destructor(PyObject *op) static int cursesmodule_exec(PyObject *module) { + _cursesmodule_state *state = get_cursesmodule_state(module); /* Initialize object type */ if (PyType_Ready(&PyCursesWindow_Type) < 0) { return -1; @@ -4792,6 +4887,7 @@ cursesmodule_exec(PyObject *module) if (PyModule_AddType(module, &PyCursesWindow_Type) < 0) { return -1; } + state->window_type = &PyCursesWindow_Type; /* Add some symbolic constants to the module */ PyObject *module_dict = PyModule_GetDict(module); @@ -4825,12 +4921,12 @@ cursesmodule_exec(PyObject *module) } /* For exception curses.error */ - PyCursesError = PyErr_NewException("_curses.error", NULL, NULL); - if (PyCursesError == NULL) { + state->error = PyErr_NewException("_curses.error", NULL, NULL); + if (state->error == NULL) { return -1; } - rc = PyDict_SetItemString(module_dict, "error", PyCursesError); - Py_DECREF(PyCursesError); + rc = PyDict_SetItemString(module_dict, "error", state->error); + Py_DECREF(state->error); if (rc < 0) { return -1; } diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c index 8562e0ca0bbbab..90527d2a3e0350 100644 --- a/Modules/_datetimemodule.c +++ b/Modules/_datetimemodule.c @@ -3445,6 +3445,27 @@ date_fromisocalendar(PyObject *cls, PyObject *args, PyObject *kw) return new_date_subclass_ex(year, month, day, cls); } +/* Return new date from _strptime.strptime_datetime_date(). */ +static PyObject * +date_strptime(PyObject *cls, PyObject *args) +{ + PyObject *string, *format, *result; + + if (!PyArg_ParseTuple(args, "UU:strptime", &string, &format)) { + return NULL; + } + + PyObject *module = PyImport_Import(&_Py_ID(_strptime)); + if (module == NULL) { + return NULL; + } + result = PyObject_CallMethodObjArgs(module, + &_Py_ID(_strptime_datetime_date), cls, + string, format, NULL); + Py_DECREF(module); + return result; +} + /* * Date arithmetic. @@ -3910,6 +3931,11 @@ static PyMethodDef date_methods[] = { "number and weekday.\n\n" "This is the inverse of the date.isocalendar() function")}, + {"strptime", (PyCFunction)date_strptime, + METH_VARARGS | METH_CLASS, + PyDoc_STR("string, format -> new date parsed from a string " + "(like time.strptime()).")}, + {"today", (PyCFunction)date_today, METH_NOARGS | METH_CLASS, PyDoc_STR("Current date or datetime: same as " "self.__class__.fromtimestamp(time.time()).")}, @@ -4644,6 +4670,27 @@ time_new(PyTypeObject *type, PyObject *args, PyObject *kw) return self; } +/* Return new time from _strptime.strptime_datetime_time(). */ +static PyObject * +time_strptime(PyObject *cls, PyObject *args) +{ + PyObject *string, *format, *result; + + if (!PyArg_ParseTuple(args, "UU:strptime", &string, &format)) { + return NULL; + } + + PyObject *module = PyImport_Import(&_Py_ID(_strptime)); + if (module == NULL) { + return NULL; + } + result = PyObject_CallMethodObjArgs(module, + &_Py_ID(_strptime_datetime_time), cls, + string, format, NULL); + Py_DECREF(module); + return result; +} + /* * Destructor. 
*/ @@ -4997,6 +5044,14 @@ time_fromisoformat(PyObject *cls, PyObject *tstr) { goto invalid_string_error; } + if (hour == 24) { + if (minute == 0 && second == 0 && microsecond == 0) { + hour = 0; + } else { + goto invalid_iso_midnight; + } + } + PyObject *tzinfo = tzinfo_from_isoformat_results(rv, tzoffset, tzimicrosecond); @@ -5015,6 +5070,10 @@ time_fromisoformat(PyObject *cls, PyObject *tstr) { Py_DECREF(tzinfo); return t; +invalid_iso_midnight: + PyErr_SetString(PyExc_ValueError, "minute, second, and microsecond must be 0 when hour is 24"); + return NULL; + invalid_string_error: PyErr_Format(PyExc_ValueError, "Invalid isoformat string: %R", tstr); return NULL; @@ -5067,6 +5126,15 @@ time_reduce(PyDateTime_Time *self, PyObject *arg) static PyMethodDef time_methods[] = { + /* Class method: */ + + {"strptime", (PyCFunction)time_strptime, + METH_VARARGS | METH_CLASS, + PyDoc_STR("string, format -> new time parsed from a string " + "(like time.strptime()).")}, + + /* Instance methods: */ + {"isoformat", _PyCFunction_CAST(time_isoformat), METH_VARARGS | METH_KEYWORDS, PyDoc_STR("Return string in ISO 8601 format, [HH[:MM[:SS[.mmm[uuu]]]]]" "[+HH:MM].\n\n" @@ -5574,7 +5642,7 @@ datetime_utcfromtimestamp(PyObject *cls, PyObject *args) return result; } -/* Return new datetime from _strptime.strptime_datetime(). */ +/* Return new datetime from _strptime.strptime_datetime_datetime(). */ static PyObject * datetime_strptime(PyObject *cls, PyObject *args) { @@ -5587,7 +5655,8 @@ datetime_strptime(PyObject *cls, PyObject *args) if (module == NULL) { return NULL; } - result = PyObject_CallMethodObjArgs(module, &_Py_ID(_strptime_datetime), + result = PyObject_CallMethodObjArgs(module, + &_Py_ID(_strptime_datetime_datetime), cls, string, format, NULL); Py_DECREF(module); return result; @@ -5861,6 +5930,26 @@ datetime_fromisoformat(PyObject *cls, PyObject *dtstr) goto error; } + if ((hour == 24) && (month <= 12)) { + int d_in_month = days_in_month(year, month); + if (day <= d_in_month) { + if (minute == 0 && second == 0 && microsecond == 0) { + // Calculate midnight of the next day + hour = 0; + day += 1; + if (day > d_in_month) { + day = 1; + month += 1; + if (month > 12) { + month = 1; + year += 1; + } + } + } else { + goto invalid_iso_midnight; + } + } + } PyObject *dt = new_datetime_subclass_ex(year, month, day, hour, minute, second, microsecond, tzinfo, cls); @@ -5868,6 +5957,12 @@ datetime_fromisoformat(PyObject *cls, PyObject *dtstr) Py_DECREF(dtstr_clean); return dt; +invalid_iso_midnight: + PyErr_SetString(PyExc_ValueError, "minute, second, and microsecond must be 0 when hour is 24"); + Py_DECREF(tzinfo); + Py_DECREF(dtstr_clean); + return NULL; + invalid_string_error: PyErr_Format(PyExc_ValueError, "Invalid isoformat string: %R", dtstr); diff --git a/Modules/_decimal/_decimal.c b/Modules/_decimal/_decimal.c index e99a96ab93392e..a33c9793b5ad17 100644 --- a/Modules/_decimal/_decimal.c +++ b/Modules/_decimal/_decimal.c @@ -122,6 +122,8 @@ get_module_state(PyObject *mod) } static struct PyModuleDef _decimal_module; +static PyType_Spec dec_spec; +static PyType_Spec context_spec; static inline decimal_state * get_module_state_by_def(PyTypeObject *tp) @@ -134,10 +136,16 @@ get_module_state_by_def(PyTypeObject *tp) static inline decimal_state * find_state_left_or_right(PyObject *left, PyObject *right) { - PyObject *mod = _PyType_GetModuleByDef2(Py_TYPE(left), Py_TYPE(right), - &_decimal_module); - assert(mod != NULL); - return get_module_state(mod); + PyTypeObject *base; + if 
(PyType_GetBaseByToken(Py_TYPE(left), &dec_spec, &base) != 1) { + assert(!PyErr_Occurred()); + PyType_GetBaseByToken(Py_TYPE(right), &dec_spec, &base); + } + assert(base != NULL); + void *state = _PyType_GetModuleState(base); + assert(state != NULL); + Py_DECREF(base); + return (decimal_state *)state; } @@ -183,6 +191,7 @@ typedef struct PyDecContextObject { PyObject *flags; int capitals; PyThreadState *tstate; + decimal_state *modstate; } PyDecContextObject; typedef struct { @@ -203,6 +212,15 @@ typedef struct { #define CTX(v) (&((PyDecContextObject *)v)->ctx) #define CtxCaps(v) (((PyDecContextObject *)v)->capitals) +static inline decimal_state * +get_module_state_from_ctx(PyObject *v) +{ + assert(PyType_GetBaseByToken(Py_TYPE(v), &context_spec, NULL) == 1); + decimal_state *state = ((PyDecContextObject *)v)->modstate; + assert(state != NULL); + return state; +} + Py_LOCAL_INLINE(PyObject *) incr_true(void) @@ -557,7 +575,7 @@ static int dec_addstatus(PyObject *context, uint32_t status) { mpd_context_t *ctx = CTX(context); - decimal_state *state = get_module_state_by_def(Py_TYPE(context)); + decimal_state *state = get_module_state_from_ctx(context); ctx->status |= status; if (status & (ctx->traps|MPD_Malloc_error)) { @@ -745,7 +763,7 @@ signaldict_richcompare(PyObject *v, PyObject *w, int op) { PyObject *res = Py_NotImplemented; - decimal_state *state = find_state_left_or_right(v, w); + decimal_state *state = get_module_state_by_def(Py_TYPE(v)); assert(PyDecSignalDict_Check(state, v)); if ((SdFlagAddr(v) == NULL) || (SdFlagAddr(w) == NULL)) { @@ -852,7 +870,7 @@ static PyObject * context_getround(PyObject *self, void *Py_UNUSED(closure)) { int i = mpd_getround(CTX(self)); - decimal_state *state = get_module_state_by_def(Py_TYPE(self)); + decimal_state *state = get_module_state_from_ctx(self); return Py_NewRef(state->round_map[i]); } @@ -1011,7 +1029,7 @@ context_setround(PyObject *self, PyObject *value, void *Py_UNUSED(closure)) mpd_context_t *ctx; int x; - decimal_state *state = get_module_state_by_def(Py_TYPE(self)); + decimal_state *state = get_module_state_from_ctx(self); x = getround(state, value); if (x == -1) { return -1; @@ -1070,7 +1088,7 @@ context_settraps_list(PyObject *self, PyObject *value) { mpd_context_t *ctx; uint32_t flags; - decimal_state *state = get_module_state_by_def(Py_TYPE(self)); + decimal_state *state = get_module_state_from_ctx(self); flags = list_as_flags(state, value); if (flags & DEC_ERRORS) { return -1; @@ -1090,7 +1108,7 @@ context_settraps_dict(PyObject *self, PyObject *value) mpd_context_t *ctx; uint32_t flags; - decimal_state *state = get_module_state_by_def(Py_TYPE(self)); + decimal_state *state = get_module_state_from_ctx(self); if (PyDecSignalDict_Check(state, value)) { flags = SdFlags(value); } @@ -1135,7 +1153,7 @@ context_setstatus_list(PyObject *self, PyObject *value) { mpd_context_t *ctx; uint32_t flags; - decimal_state *state = get_module_state_by_def(Py_TYPE(self)); + decimal_state *state = get_module_state_from_ctx(self); flags = list_as_flags(state, value); if (flags & DEC_ERRORS) { @@ -1156,7 +1174,7 @@ context_setstatus_dict(PyObject *self, PyObject *value) mpd_context_t *ctx; uint32_t flags; - decimal_state *state = get_module_state_by_def(Py_TYPE(self)); + decimal_state *state = get_module_state_from_ctx(self); if (PyDecSignalDict_Check(state, value)) { flags = SdFlags(value); } @@ -1386,6 +1404,7 @@ context_new(PyTypeObject *type, CtxCaps(self) = 1; self->tstate = NULL; + self->modstate = state; if (type == state->PyDecContext_Type) { 
PyObject_GC_Track(self); @@ -1463,7 +1482,7 @@ context_repr(PyDecContextObject *self) int n, mem; #ifdef Py_DEBUG - decimal_state *state = get_module_state_by_def(Py_TYPE(self)); + decimal_state *state = get_module_state_from_ctx((PyObject *)self); assert(PyDecContext_Check(state, self)); #endif ctx = CTX(self); @@ -1554,7 +1573,7 @@ context_copy(PyObject *self, PyObject *Py_UNUSED(dummy)) { PyObject *copy; - decimal_state *state = get_module_state_by_def(Py_TYPE(self)); + decimal_state *state = get_module_state_from_ctx(self); copy = PyObject_CallObject((PyObject *)state->PyDecContext_Type, NULL); if (copy == NULL) { return NULL; @@ -1574,7 +1593,7 @@ context_reduce(PyObject *self, PyObject *Py_UNUSED(dummy)) PyObject *traps; PyObject *ret; mpd_context_t *ctx; - decimal_state *state = get_module_state_by_def(Py_TYPE(self)); + decimal_state *state = get_module_state_from_ctx(self); ctx = CTX(self); @@ -2015,11 +2034,10 @@ static PyType_Spec ctxmanager_spec = { /******************************************************************************/ static PyObject * -PyDecType_New(PyTypeObject *type) +PyDecType_New(decimal_state *state, PyTypeObject *type) { PyDecObject *dec; - decimal_state *state = get_module_state_by_def(type); if (type == state->PyDec_Type) { dec = PyObject_GC_New(PyDecObject, state->PyDec_Type); } @@ -2045,7 +2063,7 @@ PyDecType_New(PyTypeObject *type) assert(PyObject_GC_IsTracked((PyObject *)dec)); return (PyObject *)dec; } -#define dec_alloc(st) PyDecType_New((st)->PyDec_Type) +#define dec_alloc(st) PyDecType_New(st, (st)->PyDec_Type) static int dec_traverse(PyObject *dec, visitproc visit, void *arg) @@ -2148,7 +2166,8 @@ PyDecType_FromCString(PyTypeObject *type, const char *s, PyObject *dec; uint32_t status = 0; - dec = PyDecType_New(type); + decimal_state *state = get_module_state_from_ctx(context); + dec = PyDecType_New(state, type); if (dec == NULL) { return NULL; } @@ -2172,7 +2191,8 @@ PyDecType_FromCStringExact(PyTypeObject *type, const char *s, uint32_t status = 0; mpd_context_t maxctx; - dec = PyDecType_New(type); + decimal_state *state = get_module_state_from_ctx(context); + dec = PyDecType_New(state, type); if (dec == NULL) { return NULL; } @@ -2259,7 +2279,8 @@ PyDecType_FromSsize(PyTypeObject *type, mpd_ssize_t v, PyObject *context) PyObject *dec; uint32_t status = 0; - dec = PyDecType_New(type); + decimal_state *state = get_module_state_from_ctx(context); + dec = PyDecType_New(state, type); if (dec == NULL) { return NULL; } @@ -2280,7 +2301,8 @@ PyDecType_FromSsizeExact(PyTypeObject *type, mpd_ssize_t v, PyObject *context) uint32_t status = 0; mpd_context_t maxctx; - dec = PyDecType_New(type); + decimal_state *state = get_module_state_from_ctx(context); + dec = PyDecType_New(state, type); if (dec == NULL) { return NULL; } @@ -2298,13 +2320,13 @@ PyDecType_FromSsizeExact(PyTypeObject *type, mpd_ssize_t v, PyObject *context) /* Convert from a PyLongObject. The context is not modified; flags set during conversion are accumulated in the status parameter. 
*/ static PyObject * -dec_from_long(PyTypeObject *type, PyObject *v, +dec_from_long(decimal_state *state, PyTypeObject *type, PyObject *v, const mpd_context_t *ctx, uint32_t *status) { PyObject *dec; PyLongObject *l = (PyLongObject *)v; - dec = PyDecType_New(type); + dec = PyDecType_New(state, type); if (dec == NULL) { return NULL; } @@ -2349,7 +2371,8 @@ PyDecType_FromLong(PyTypeObject *type, PyObject *v, PyObject *context) return NULL; } - dec = dec_from_long(type, v, CTX(context), &status); + decimal_state *state = get_module_state_from_ctx(context); + dec = dec_from_long(state, type, v, CTX(context), &status); if (dec == NULL) { return NULL; } @@ -2378,7 +2401,8 @@ PyDecType_FromLongExact(PyTypeObject *type, PyObject *v, } mpd_maxcontext(&maxctx); - dec = dec_from_long(type, v, &maxctx, &status); + decimal_state *state = get_module_state_from_ctx(context); + dec = dec_from_long(state, type, v, &maxctx, &status); if (dec == NULL) { return NULL; } @@ -2410,7 +2434,7 @@ PyDecType_FromFloatExact(PyTypeObject *type, PyObject *v, mpd_t *d1, *d2; uint32_t status = 0; mpd_context_t maxctx; - decimal_state *state = get_module_state_by_def(type); + decimal_state *state = get_module_state_from_ctx(context); #ifdef Py_DEBUG assert(PyType_IsSubtype(type, state->PyDec_Type)); @@ -2431,7 +2455,7 @@ PyDecType_FromFloatExact(PyTypeObject *type, PyObject *v, sign = (copysign(1.0, x) == 1.0) ? 0 : 1; if (isnan(x) || isinf(x)) { - dec = PyDecType_New(type); + dec = PyDecType_New(state, type); if (dec == NULL) { return NULL; } @@ -2548,12 +2572,12 @@ PyDecType_FromDecimalExact(PyTypeObject *type, PyObject *v, PyObject *context) PyObject *dec; uint32_t status = 0; - decimal_state *state = get_module_state_by_def(type); + decimal_state *state = get_module_state_from_ctx(context); if (type == state->PyDec_Type && PyDec_CheckExact(state, v)) { return Py_NewRef(v); } - dec = PyDecType_New(type); + dec = PyDecType_New(state, type); if (dec == NULL) { return NULL; } @@ -2837,7 +2861,7 @@ dec_from_float(PyObject *type, PyObject *pyfloat) static PyObject * ctx_from_float(PyObject *context, PyObject *v) { - decimal_state *state = get_module_state_by_def(Py_TYPE(context)); + decimal_state *state = get_module_state_from_ctx(context); return PyDec_FromFloat(state, v, context); } @@ -2848,7 +2872,7 @@ dec_apply(PyObject *v, PyObject *context) PyObject *result; uint32_t status = 0; - decimal_state *state = get_module_state_by_def(Py_TYPE(context)); + decimal_state *state = get_module_state_from_ctx(context); result = dec_alloc(state); if (result == NULL) { return NULL; @@ -2875,7 +2899,7 @@ dec_apply(PyObject *v, PyObject *context) static PyObject * PyDecType_FromObjectExact(PyTypeObject *type, PyObject *v, PyObject *context) { - decimal_state *state = get_module_state_by_def(type); + decimal_state *state = get_module_state_from_ctx(context); if (v == NULL) { return PyDecType_FromSsizeExact(type, 0, context); } @@ -2910,7 +2934,7 @@ PyDecType_FromObjectExact(PyTypeObject *type, PyObject *v, PyObject *context) static PyObject * PyDec_FromObject(PyObject *v, PyObject *context) { - decimal_state *state = get_module_state_by_def(Py_TYPE(context)); + decimal_state *state = get_module_state_from_ctx(context); if (v == NULL) { return PyDec_FromSsize(state, 0, context); } @@ -2997,7 +3021,7 @@ ctx_create_decimal(PyObject *context, PyObject *args) Py_LOCAL_INLINE(int) convert_op(int type_err, PyObject **conv, PyObject *v, PyObject *context) { - decimal_state *state = get_module_state_by_def(Py_TYPE(context)); + decimal_state 
*state = get_module_state_from_ctx(context); if (PyDec_Check(state, v)) { *conv = Py_NewRef(v); return 1; @@ -3100,7 +3124,7 @@ multiply_by_denominator(PyObject *v, PyObject *r, PyObject *context) if (tmp == NULL) { return NULL; } - decimal_state *state = get_module_state_by_def(Py_TYPE(context)); + decimal_state *state = get_module_state_from_ctx(context); denom = PyDec_FromLongExact(state, tmp, context); Py_DECREF(tmp); if (denom == NULL) { @@ -3155,7 +3179,7 @@ numerator_as_decimal(PyObject *r, PyObject *context) return NULL; } - decimal_state *state = get_module_state_by_def(Py_TYPE(context)); + decimal_state *state = get_module_state_from_ctx(context); num = PyDec_FromLongExact(state, tmp, context); Py_DECREF(tmp); return num; @@ -3174,7 +3198,7 @@ convert_op_cmp(PyObject **vcmp, PyObject **wcmp, PyObject *v, PyObject *w, *vcmp = v; - decimal_state *state = get_module_state_by_def(Py_TYPE(context)); + decimal_state *state = get_module_state_from_ctx(context); if (PyDec_Check(state, w)) { *wcmp = Py_NewRef(w); } @@ -4414,12 +4438,11 @@ dec_conjugate(PyObject *self, PyObject *Py_UNUSED(dummy)) return Py_NewRef(self); } -static PyObject * -dec_mpd_radix(PyObject *self, PyObject *Py_UNUSED(dummy)) +static inline PyObject * +_dec_mpd_radix(decimal_state *state) { PyObject *result; - decimal_state *state = get_module_state_by_def(Py_TYPE(self)); result = dec_alloc(state); if (result == NULL) { return NULL; @@ -4429,6 +4452,13 @@ dec_mpd_radix(PyObject *self, PyObject *Py_UNUSED(dummy)) return result; } +static PyObject * +dec_mpd_radix(PyObject *self, PyObject *Py_UNUSED(dummy)) +{ + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); + return _dec_mpd_radix(state); +} + static PyObject * dec_mpd_qcopy_abs(PyObject *self, PyObject *Py_UNUSED(dummy)) { @@ -5041,6 +5071,7 @@ static PyMethodDef dec_methods [] = }; static PyType_Slot dec_slots[] = { + {Py_tp_token, Py_TP_USE_SPEC}, {Py_tp_dealloc, dec_dealloc}, {Py_tp_getattro, PyObject_GenericGetAttr}, {Py_tp_traverse, dec_traverse}, @@ -5130,7 +5161,7 @@ ctx_##MPDFUNC(PyObject *context, PyObject *v) \ \ CONVERT_OP_RAISE(&a, v, context); \ decimal_state *state = \ - get_module_state_by_def(Py_TYPE(context)); \ + get_module_state_from_ctx(context); \ if ((result = dec_alloc(state)) == NULL) { \ Py_DECREF(a); \ return NULL; \ @@ -5162,7 +5193,7 @@ ctx_##MPDFUNC(PyObject *context, PyObject *args) \ \ CONVERT_BINOP_RAISE(&a, &b, v, w, context); \ decimal_state *state = \ - get_module_state_by_def(Py_TYPE(context)); \ + get_module_state_from_ctx(context); \ if ((result = dec_alloc(state)) == NULL) { \ Py_DECREF(a); \ Py_DECREF(b); \ @@ -5198,7 +5229,7 @@ ctx_##MPDFUNC(PyObject *context, PyObject *args) \ \ CONVERT_BINOP_RAISE(&a, &b, v, w, context); \ decimal_state *state = \ - get_module_state_by_def(Py_TYPE(context)); \ + get_module_state_from_ctx(context); \ if ((result = dec_alloc(state)) == NULL) { \ Py_DECREF(a); \ Py_DECREF(b); \ @@ -5227,7 +5258,7 @@ ctx_##MPDFUNC(PyObject *context, PyObject *args) \ } \ \ CONVERT_TERNOP_RAISE(&a, &b, &c, v, w, x, context); \ - decimal_state *state = get_module_state_by_def(Py_TYPE(context)); \ + decimal_state *state = get_module_state_from_ctx(context); \ if ((result = dec_alloc(state)) == NULL) { \ Py_DECREF(a); \ Py_DECREF(b); \ @@ -5293,7 +5324,7 @@ ctx_mpd_qdivmod(PyObject *context, PyObject *args) } CONVERT_BINOP_RAISE(&a, &b, v, w, context); - decimal_state *state = get_module_state_by_def(Py_TYPE(context)); + decimal_state *state = get_module_state_from_ctx(context); q = dec_alloc(state); 
if (q == NULL) { Py_DECREF(a); @@ -5348,7 +5379,7 @@ ctx_mpd_qpow(PyObject *context, PyObject *args, PyObject *kwds) } } - decimal_state *state = get_module_state_by_def(Py_TYPE(context)); + decimal_state *state = get_module_state_from_ctx(context); result = dec_alloc(state); if (result == NULL) { Py_DECREF(a); @@ -5383,7 +5414,8 @@ DecCtx_TernaryFunc(mpd_qfma) static PyObject * ctx_mpd_radix(PyObject *context, PyObject *dummy) { - return dec_mpd_radix(context, dummy); + decimal_state *state = get_module_state_from_ctx(context); + return _dec_mpd_radix(state); } /* Boolean functions: single decimal argument */ @@ -5400,7 +5432,7 @@ DecCtx_BoolFunc_NO_CTX(mpd_iszero) static PyObject * ctx_iscanonical(PyObject *context, PyObject *v) { - decimal_state *state = get_module_state_by_def(Py_TYPE(context)); + decimal_state *state = get_module_state_from_ctx(context); if (!PyDec_Check(state, v)) { PyErr_SetString(PyExc_TypeError, "argument must be a Decimal"); @@ -5426,7 +5458,7 @@ PyDecContext_Apply(PyObject *context, PyObject *v) static PyObject * ctx_canonical(PyObject *context, PyObject *v) { - decimal_state *state = get_module_state_by_def(Py_TYPE(context)); + decimal_state *state = get_module_state_from_ctx(context); if (!PyDec_Check(state, v)) { PyErr_SetString(PyExc_TypeError, "argument must be a Decimal"); @@ -5443,7 +5475,7 @@ ctx_mpd_qcopy_abs(PyObject *context, PyObject *v) uint32_t status = 0; CONVERT_OP_RAISE(&a, v, context); - decimal_state *state = get_module_state_by_def(Py_TYPE(context)); + decimal_state *state = get_module_state_from_ctx(context); result = dec_alloc(state); if (result == NULL) { Py_DECREF(a); @@ -5476,7 +5508,7 @@ ctx_mpd_qcopy_negate(PyObject *context, PyObject *v) uint32_t status = 0; CONVERT_OP_RAISE(&a, v, context); - decimal_state *state = get_module_state_by_def(Py_TYPE(context)); + decimal_state *state = get_module_state_from_ctx(context); result = dec_alloc(state); if (result == NULL) { Py_DECREF(a); @@ -5573,7 +5605,7 @@ ctx_mpd_qcopy_sign(PyObject *context, PyObject *args) } CONVERT_BINOP_RAISE(&a, &b, v, w, context); - decimal_state *state = get_module_state_by_def(Py_TYPE(context)); + decimal_state *state = get_module_state_from_ctx(context); result = dec_alloc(state); if (result == NULL) { Py_DECREF(a); @@ -5728,6 +5760,7 @@ static PyMethodDef context_methods [] = }; static PyType_Slot context_slots[] = { + {Py_tp_token, Py_TP_USE_SPEC}, {Py_tp_dealloc, context_dealloc}, {Py_tp_traverse, context_traverse}, {Py_tp_clear, context_clear}, diff --git a/Modules/_functoolsmodule.c b/Modules/_functoolsmodule.c index 64766b474514bf..31cf7bcc09782c 100644 --- a/Modules/_functoolsmodule.c +++ b/Modules/_functoolsmodule.c @@ -25,6 +25,8 @@ class _functools._lru_cache_wrapper "PyObject *" "&lru_cache_type_spec" typedef struct _functools_state { /* this object is used delimit args and keywords in the cache keys */ PyObject *kwd_mark; + PyTypeObject *placeholder_type; + PyObject *placeholder; // strong reference (singleton) PyTypeObject *partial_type; PyTypeObject *keyobject_type; PyTypeObject *lru_list_elem_type; @@ -41,6 +43,95 @@ get_functools_state(PyObject *module) /* partial object **********************************************************/ + +// The 'Placeholder' singleton indicates which formal positional +// parameters are to be bound first when using a 'partial' object. 
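Editorial note: a minimal sketch of the Python-level behaviour this singleton enables, assuming the Python-level functools module re-exports the "Placeholder" object that the module-exec code below registers under that name; the pow() example and its values are purely illustrative:

    from functools import partial, Placeholder

    # Placeholder reserves a positional slot that must be supplied at call time.
    # Here the exponent is bound and the base is left open.
    square = partial(pow, Placeholder, 2)
    print(square(5))          # the placeholder is filled with 5 -> pow(5, 2) == 25

    # Trailing placeholders are rejected (see the check in partial_new below),
    # and each call must supply at least as many positional arguments as there
    # are placeholders.
    try:
        partial(pow, 5, Placeholder)
    except TypeError as exc:
        print(exc)            # trailing Placeholders are not allowed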
+ +typedef struct { + PyObject_HEAD +} placeholderobject; + +static inline _functools_state * +get_functools_state_by_type(PyTypeObject *type); + +PyDoc_STRVAR(placeholder_doc, +"The type of the Placeholder singleton.\n\n" +"Used as a placeholder for partial arguments."); + +static PyObject * +placeholder_repr(PyObject *op) +{ + return PyUnicode_FromString("Placeholder"); +} + +static PyObject * +placeholder_reduce(PyObject *op, PyObject *Py_UNUSED(ignored)) +{ + return PyUnicode_FromString("Placeholder"); +} + +static PyMethodDef placeholder_methods[] = { + {"__reduce__", placeholder_reduce, METH_NOARGS, NULL}, + {NULL, NULL} +}; + +static void +placeholder_dealloc(PyObject* self) +{ + PyObject_GC_UnTrack(self); + PyTypeObject *tp = Py_TYPE(self); + tp->tp_free((PyObject*)self); + Py_DECREF(tp); +} + +static PyObject * +placeholder_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) +{ + if (PyTuple_GET_SIZE(args) || (kwargs && PyDict_GET_SIZE(kwargs))) { + PyErr_SetString(PyExc_TypeError, "PlaceholderType takes no arguments"); + return NULL; + } + _functools_state *state = get_functools_state_by_type(type); + if (state->placeholder != NULL) { + return Py_NewRef(state->placeholder); + } + + PyObject *placeholder = PyType_GenericNew(type, NULL, NULL); + if (placeholder == NULL) { + return NULL; + } + + if (state->placeholder == NULL) { + state->placeholder = Py_NewRef(placeholder); + } + return placeholder; +} + +static int +placeholder_traverse(PyObject *self, visitproc visit, void *arg) +{ + Py_VISIT(Py_TYPE(self)); + return 0; +} + +static PyType_Slot placeholder_type_slots[] = { + {Py_tp_dealloc, placeholder_dealloc}, + {Py_tp_repr, placeholder_repr}, + {Py_tp_doc, (void *)placeholder_doc}, + {Py_tp_methods, placeholder_methods}, + {Py_tp_new, placeholder_new}, + {Py_tp_traverse, placeholder_traverse}, + {0, 0} +}; + +static PyType_Spec placeholder_type_spec = { + .name = "functools._PlaceholderType", + .basicsize = sizeof(placeholderobject), + .flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_IMMUTABLETYPE | Py_TPFLAGS_HAVE_GC, + .slots = placeholder_type_slots +}; + + typedef struct { PyObject_HEAD PyObject *fn; @@ -48,6 +139,8 @@ typedef struct { PyObject *kw; PyObject *dict; /* __dict__ */ PyObject *weakreflist; /* List of weak references */ + PyObject *placeholder; /* Placeholder for positional arguments */ + Py_ssize_t phcount; /* Number of placeholders */ vectorcallfunc vectorcall; } partialobject; @@ -70,23 +163,38 @@ get_functools_state_by_type(PyTypeObject *type) static PyObject * partial_new(PyTypeObject *type, PyObject *args, PyObject *kw) { - PyObject *func, *pargs, *nargs, *pkw; + PyObject *func, *pto_args, *new_args, *pto_kw, *phold; partialobject *pto; + Py_ssize_t pto_phcount = 0; + Py_ssize_t new_nargs = PyTuple_GET_SIZE(args) - 1; - if (PyTuple_GET_SIZE(args) < 1) { + if (new_nargs < 0) { PyErr_SetString(PyExc_TypeError, "type 'partial' takes at least one argument"); return NULL; } + func = PyTuple_GET_ITEM(args, 0); + if (!PyCallable_Check(func)) { + PyErr_SetString(PyExc_TypeError, + "the first argument must be callable"); + return NULL; + } _functools_state *state = get_functools_state_by_type(type); if (state == NULL) { return NULL; } + phold = state->placeholder; - pargs = pkw = NULL; - func = PyTuple_GET_ITEM(args, 0); + /* Placeholder restrictions */ + if (new_nargs && PyTuple_GET_ITEM(args, new_nargs) == phold) { + PyErr_SetString(PyExc_TypeError, + "trailing Placeholders are not allowed"); + return NULL; + } + /* check wrapped function / object */ + pto_args = 
pto_kw = NULL; int res = PyObject_TypeCheck(func, state->partial_type); if (res == -1) { return NULL; @@ -95,18 +203,14 @@ partial_new(PyTypeObject *type, PyObject *args, PyObject *kw) // We can use its underlying function directly and merge the arguments. partialobject *part = (partialobject *)func; if (part->dict == NULL) { - pargs = part->args; - pkw = part->kw; + pto_args = part->args; + pto_kw = part->kw; func = part->fn; - assert(PyTuple_Check(pargs)); - assert(PyDict_Check(pkw)); + pto_phcount = part->phcount; + assert(PyTuple_Check(pto_args)); + assert(PyDict_Check(pto_kw)); } } - if (!PyCallable_Check(func)) { - PyErr_SetString(PyExc_TypeError, - "the first argument must be callable"); - return NULL; - } /* create partialobject structure */ pto = (partialobject *)type->tp_alloc(type, 0); @@ -114,18 +218,58 @@ partial_new(PyTypeObject *type, PyObject *args, PyObject *kw) return NULL; pto->fn = Py_NewRef(func); + pto->placeholder = phold; - nargs = PyTuple_GetSlice(args, 1, PY_SSIZE_T_MAX); - if (nargs == NULL) { + new_args = PyTuple_GetSlice(args, 1, new_nargs + 1); + if (new_args == NULL) { Py_DECREF(pto); return NULL; } - if (pargs == NULL) { - pto->args = nargs; + + /* Count placeholders */ + Py_ssize_t phcount = 0; + for (Py_ssize_t i = 0; i < new_nargs - 1; i++) { + if (PyTuple_GET_ITEM(new_args, i) == phold) { + phcount++; + } + } + /* merge args with args of `func` which is `partial` */ + if (pto_phcount > 0 && new_nargs > 0) { + Py_ssize_t npargs = PyTuple_GET_SIZE(pto_args); + Py_ssize_t tot_nargs = npargs; + if (new_nargs > pto_phcount) { + tot_nargs += new_nargs - pto_phcount; + } + PyObject *item; + PyObject *tot_args = PyTuple_New(tot_nargs); + for (Py_ssize_t i = 0, j = 0; i < tot_nargs; i++) { + if (i < npargs) { + item = PyTuple_GET_ITEM(pto_args, i); + if (j < new_nargs && item == phold) { + item = PyTuple_GET_ITEM(new_args, j); + j++; + pto_phcount--; + } + } + else { + item = PyTuple_GET_ITEM(new_args, j); + j++; + } + Py_INCREF(item); + PyTuple_SET_ITEM(tot_args, i, item); + } + pto->args = tot_args; + pto->phcount = pto_phcount + phcount; + Py_DECREF(new_args); + } + else if (pto_args == NULL) { + pto->args = new_args; + pto->phcount = phcount; } else { - pto->args = PySequence_Concat(pargs, nargs); - Py_DECREF(nargs); + pto->args = PySequence_Concat(pto_args, new_args); + pto->phcount = pto_phcount + phcount; + Py_DECREF(new_args); if (pto->args == NULL) { Py_DECREF(pto); return NULL; @@ -133,7 +277,7 @@ partial_new(PyTypeObject *type, PyObject *args, PyObject *kw) assert(PyTuple_Check(pto->args)); } - if (pkw == NULL || PyDict_GET_SIZE(pkw) == 0) { + if (pto_kw == NULL || PyDict_GET_SIZE(pto_kw) == 0) { if (kw == NULL) { pto->kw = PyDict_New(); } @@ -145,7 +289,7 @@ partial_new(PyTypeObject *type, PyObject *args, PyObject *kw) } } else { - pto->kw = PyDict_Copy(pkw); + pto->kw = PyDict_Copy(pto_kw); if (kw != NULL && pto->kw != NULL) { if (PyDict_Merge(pto->kw, kw, 1) != 0) { Py_DECREF(pto); @@ -225,23 +369,30 @@ partial_vectorcall(partialobject *pto, PyObject *const *args, size_t nargsf, PyObject *kwnames) { PyThreadState *tstate = _PyThreadState_GET(); + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); /* pto->kw is mutable, so need to check every time */ if (PyDict_GET_SIZE(pto->kw)) { return partial_vectorcall_fallback(tstate, pto, args, nargsf, kwnames); } + Py_ssize_t pto_phcount = pto->phcount; + if (nargs < pto_phcount) { + PyErr_Format(PyExc_TypeError, + "missing positional arguments in 'partial' call; " + "expected at least %zd, got %zd", pto_phcount, 
nargs); + return NULL; + } - Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); - Py_ssize_t nargs_total = nargs; + Py_ssize_t nargskw = nargs; if (kwnames != NULL) { - nargs_total += PyTuple_GET_SIZE(kwnames); + nargskw += PyTuple_GET_SIZE(kwnames); } PyObject **pto_args = _PyTuple_ITEMS(pto->args); Py_ssize_t pto_nargs = PyTuple_GET_SIZE(pto->args); /* Fast path if we're called without arguments */ - if (nargs_total == 0) { + if (nargskw == 0) { return _PyObject_VectorcallTstate(tstate, pto->fn, pto_args, pto_nargs, NULL); } @@ -258,29 +409,47 @@ partial_vectorcall(partialobject *pto, PyObject *const *args, return ret; } - Py_ssize_t newnargs_total = pto_nargs + nargs_total; - PyObject *small_stack[_PY_FASTCALL_SMALL_STACK]; - PyObject *ret; PyObject **stack; - if (newnargs_total <= (Py_ssize_t)Py_ARRAY_LENGTH(small_stack)) { + Py_ssize_t tot_nargskw = pto_nargs + nargskw - pto_phcount; + if (tot_nargskw <= (Py_ssize_t)Py_ARRAY_LENGTH(small_stack)) { stack = small_stack; } else { - stack = PyMem_Malloc(newnargs_total * sizeof(PyObject *)); + stack = PyMem_Malloc(tot_nargskw * sizeof(PyObject *)); if (stack == NULL) { PyErr_NoMemory(); return NULL; } } - /* Copy to new stack, using borrowed references */ - memcpy(stack, pto_args, pto_nargs * sizeof(PyObject*)); - memcpy(stack + pto_nargs, args, nargs_total * sizeof(PyObject*)); - - ret = _PyObject_VectorcallTstate(tstate, pto->fn, - stack, pto_nargs + nargs, kwnames); + Py_ssize_t tot_nargs; + if (pto_phcount) { + tot_nargs = pto_nargs + nargs - pto_phcount; + Py_ssize_t j = 0; // New args index + for (Py_ssize_t i = 0; i < pto_nargs; i++) { + if (pto_args[i] == pto->placeholder) { + stack[i] = args[j]; + j += 1; + } + else { + stack[i] = pto_args[i]; + } + } + assert(j == pto_phcount); + if (nargskw > pto_phcount) { + memcpy(stack + pto_nargs, args + j, (nargskw - j) * sizeof(PyObject*)); + } + } + else { + tot_nargs = pto_nargs + nargs; + /* Copy to new stack, using borrowed references */ + memcpy(stack, pto_args, pto_nargs * sizeof(PyObject*)); + memcpy(stack + pto_nargs, args, nargskw * sizeof(PyObject*)); + } + PyObject *ret = _PyObject_VectorcallTstate(tstate, pto->fn, + stack, tot_nargs, kwnames); if (stack != small_stack) { PyMem_Free(stack); } @@ -312,40 +481,81 @@ partial_call(partialobject *pto, PyObject *args, PyObject *kwargs) assert(PyTuple_Check(pto->args)); assert(PyDict_Check(pto->kw)); + Py_ssize_t nargs = PyTuple_GET_SIZE(args); + Py_ssize_t pto_phcount = pto->phcount; + if (nargs < pto_phcount) { + PyErr_Format(PyExc_TypeError, + "missing positional arguments in 'partial' call; " + "expected at least %zd, got %zd", pto_phcount, nargs); + return NULL; + } + /* Merge keywords */ - PyObject *kwargs2; + PyObject *tot_kw; if (PyDict_GET_SIZE(pto->kw) == 0) { /* kwargs can be NULL */ - kwargs2 = Py_XNewRef(kwargs); + tot_kw = Py_XNewRef(kwargs); } else { /* bpo-27840, bpo-29318: dictionary of keyword parameters must be copied, because a function using "**kwargs" can modify the dictionary. 
*/ - kwargs2 = PyDict_Copy(pto->kw); - if (kwargs2 == NULL) { + tot_kw = PyDict_Copy(pto->kw); + if (tot_kw == NULL) { return NULL; } if (kwargs != NULL) { - if (PyDict_Merge(kwargs2, kwargs, 1) != 0) { - Py_DECREF(kwargs2); + if (PyDict_Merge(tot_kw, kwargs, 1) != 0) { + Py_DECREF(tot_kw); return NULL; } } } /* Merge positional arguments */ - /* Note: tupleconcat() is optimized for empty tuples */ - PyObject *args2 = PySequence_Concat(pto->args, args); - if (args2 == NULL) { - Py_XDECREF(kwargs2); - return NULL; + PyObject *tot_args; + if (pto_phcount) { + Py_ssize_t pto_nargs = PyTuple_GET_SIZE(pto->args); + Py_ssize_t tot_nargs = pto_nargs + nargs - pto_phcount; + assert(tot_nargs >= 0); + tot_args = PyTuple_New(tot_nargs); + if (tot_args == NULL) { + Py_XDECREF(tot_kw); + return NULL; + } + PyObject *pto_args = pto->args; + PyObject *item; + Py_ssize_t j = 0; // New args index + for (Py_ssize_t i = 0; i < pto_nargs; i++) { + item = PyTuple_GET_ITEM(pto_args, i); + if (item == pto->placeholder) { + item = PyTuple_GET_ITEM(args, j); + j += 1; + } + Py_INCREF(item); + PyTuple_SET_ITEM(tot_args, i, item); + } + assert(j == pto_phcount); + for (Py_ssize_t i = pto_nargs; i < tot_nargs; i++) { + item = PyTuple_GET_ITEM(args, j); + Py_INCREF(item); + PyTuple_SET_ITEM(tot_args, i, item); + j += 1; + } + } + else { + /* Note: tupleconcat() is optimized for empty tuples */ + tot_args = PySequence_Concat(pto->args, args); + if (tot_args == NULL) { + Py_XDECREF(tot_kw); + return NULL; + } } - PyObject *res = PyObject_Call(pto->fn, args2, kwargs2); - Py_DECREF(args2); - Py_XDECREF(kwargs2); + PyObject *res = PyObject_Call(pto->fn, tot_args, tot_kw); + Py_DECREF(tot_args); + Py_XDECREF(tot_kw); return res; } @@ -461,8 +671,11 @@ partial_setstate(partialobject *pto, PyObject *state) { PyObject *fn, *fnargs, *kw, *dict; - if (!PyTuple_Check(state) || - !PyArg_ParseTuple(state, "OOOO", &fn, &fnargs, &kw, &dict) || + if (!PyTuple_Check(state)) { + PyErr_SetString(PyExc_TypeError, "invalid partial state"); + return NULL; + } + if (!PyArg_ParseTuple(state, "OOOO", &fn, &fnargs, &kw, &dict) || !PyCallable_Check(fn) || !PyTuple_Check(fnargs) || (kw != Py_None && !PyDict_Check(kw))) @@ -471,6 +684,20 @@ partial_setstate(partialobject *pto, PyObject *state) return NULL; } + Py_ssize_t nargs = PyTuple_GET_SIZE(fnargs); + if (nargs && PyTuple_GET_ITEM(fnargs, nargs - 1) == pto->placeholder) { + PyErr_SetString(PyExc_TypeError, + "trailing Placeholders are not allowed"); + return NULL; + } + /* Count placeholders */ + Py_ssize_t phcount = 0; + for (Py_ssize_t i = 0; i < nargs - 1; i++) { + if (PyTuple_GET_ITEM(fnargs, i) == pto->placeholder) { + phcount++; + } + } + if(!PyTuple_CheckExact(fnargs)) fnargs = PySequence_Tuple(fnargs); else @@ -493,10 +720,10 @@ partial_setstate(partialobject *pto, PyObject *state) dict = NULL; else Py_INCREF(dict); - Py_SETREF(pto->fn, Py_NewRef(fn)); Py_SETREF(pto->args, fnargs); Py_SETREF(pto->kw, kw); + pto->phcount = phcount; Py_XSETREF(pto->dict, dict); partial_setvectorcall(pto); Py_RETURN_NONE; @@ -1498,6 +1725,25 @@ _functools_exec(PyObject *module) return -1; } + state->placeholder_type = (PyTypeObject *)PyType_FromModuleAndSpec(module, + &placeholder_type_spec, NULL); + if (state->placeholder_type == NULL) { + return -1; + } + if (PyModule_AddType(module, state->placeholder_type) < 0) { + return -1; + } + + PyObject *placeholder = PyObject_CallNoArgs((PyObject *)state->placeholder_type); + if (placeholder == NULL) { + return -1; + } + if (PyModule_AddObjectRef(module, 
"Placeholder", placeholder) < 0) { + Py_DECREF(placeholder); + return -1; + } + Py_DECREF(placeholder); + state->partial_type = (PyTypeObject *)PyType_FromModuleAndSpec(module, &partial_type_spec, NULL); if (state->partial_type == NULL) { @@ -1542,6 +1788,8 @@ _functools_traverse(PyObject *module, visitproc visit, void *arg) { _functools_state *state = get_functools_state(module); Py_VISIT(state->kwd_mark); + Py_VISIT(state->placeholder_type); + Py_VISIT(state->placeholder); Py_VISIT(state->partial_type); Py_VISIT(state->keyobject_type); Py_VISIT(state->lru_list_elem_type); @@ -1553,6 +1801,8 @@ _functools_clear(PyObject *module) { _functools_state *state = get_functools_state(module); Py_CLEAR(state->kwd_mark); + Py_CLEAR(state->placeholder_type); + Py_CLEAR(state->placeholder); Py_CLEAR(state->partial_type); Py_CLEAR(state->keyobject_type); Py_CLEAR(state->lru_list_elem_type); diff --git a/Modules/_pickle.c b/Modules/_pickle.c index 18affdd4875f3b..b2bd9545c1b130 100644 --- a/Modules/_pickle.c +++ b/Modules/_pickle.c @@ -2146,7 +2146,7 @@ save_long(PicklerObject *self, PyObject *obj) if (self->proto >= 2) { /* Linear-time pickling. */ - uint64_t nbits; + int64_t nbits; size_t nbytes; unsigned char *pdata; char header[5]; @@ -2161,8 +2161,8 @@ save_long(PicklerObject *self, PyObject *obj) return 0; } nbits = _PyLong_NumBits(obj); - if (nbits == (uint64_t)-1 && PyErr_Occurred()) - goto error; + assert(nbits >= 0); + assert(!PyErr_Occurred()); /* How many bytes do we need? There are nbits >> 3 full * bytes of data, and nbits & 7 leftover bits. If there * are any leftover bits, then we clearly need another diff --git a/Modules/_randommodule.c b/Modules/_randommodule.c index 3835a3072d96c6..ad66df47349db0 100644 --- a/Modules/_randommodule.c +++ b/Modules/_randommodule.c @@ -295,7 +295,7 @@ random_seed(RandomObject *self, PyObject *arg) int result = -1; /* guilty until proved innocent */ PyObject *n = NULL; uint32_t *key = NULL; - uint64_t bits; + int64_t bits; size_t keyused; int res; @@ -335,8 +335,8 @@ random_seed(RandomObject *self, PyObject *arg) /* Now split n into 32-bit chunks, from the right. */ bits = _PyLong_NumBits(n); - if (bits == (uint64_t)-1 && PyErr_Occurred()) - goto Done; + assert(bits >= 0); + assert(!PyErr_Occurred()); /* Figure out how many 32-bit chunks this gives us. */ keyused = bits == 0 ? 1 : (size_t)((bits - 1) / 32 + 1); diff --git a/Modules/_testcapi/codec.c b/Modules/_testcapi/codec.c index d13f51e20331a1..e27e64e066c458 100644 --- a/Modules/_testcapi/codec.c +++ b/Modules/_testcapi/codec.c @@ -1,15 +1,219 @@ #include "parts.h" #include "util.h" +// === Codecs registration and un-registration ================================ + +static PyObject * +codec_register(PyObject *Py_UNUSED(module), PyObject *search_function) +{ + if (PyCodec_Register(search_function) < 0) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject * +codec_unregister(PyObject *Py_UNUSED(module), PyObject *search_function) +{ + if (PyCodec_Unregister(search_function) < 0) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject * +codec_known_encoding(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *NULL_WOULD_RAISE(encoding); // NULL case will be tested + if (!PyArg_ParseTuple(args, "z", &encoding)) { + return NULL; + } + return PyCodec_KnownEncoding(encoding) ? 
Py_True : Py_False; +} + +// === Codecs encoding and decoding interfaces ================================ + +static PyObject * +codec_encode(PyObject *Py_UNUSED(module), PyObject *args) +{ + PyObject *input; + const char *NULL_WOULD_RAISE(encoding); // NULL case will be tested + const char *errors; // can be NULL + if (!PyArg_ParseTuple(args, "O|zz", &input, &encoding, &errors)) { + return NULL; + } + return PyCodec_Encode(input, encoding, errors); +} + +static PyObject * +codec_decode(PyObject *Py_UNUSED(module), PyObject *args) +{ + PyObject *input; + const char *NULL_WOULD_RAISE(encoding); // NULL case will be tested + const char *errors; // can be NULL + if (!PyArg_ParseTuple(args, "O|zz", &input, &encoding, &errors)) { + return NULL; + } + return PyCodec_Decode(input, encoding, errors); +} + +static PyObject * +codec_encoder(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *NULL_WOULD_RAISE(encoding); // NULL case will be tested + if (!PyArg_ParseTuple(args, "z", &encoding)) { + return NULL; + } + return PyCodec_Encoder(encoding); +} + +static PyObject * +codec_decoder(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *NULL_WOULD_RAISE(encoding); // NULL case will be tested + if (!PyArg_ParseTuple(args, "z", &encoding)) { + return NULL; + } + return PyCodec_Decoder(encoding); +} + +static PyObject * +codec_incremental_encoder(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *NULL_WOULD_RAISE(encoding); // NULL case will be tested + const char *errors; // can be NULL + if (!PyArg_ParseTuple(args, "zz", &encoding, &errors)) { + return NULL; + } + return PyCodec_IncrementalEncoder(encoding, errors); +} + +static PyObject * +codec_incremental_decoder(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *NULL_WOULD_RAISE(encoding); // NULL case will be tested + const char *errors; // can be NULL + if (!PyArg_ParseTuple(args, "zz", &encoding, &errors)) { + return NULL; + } + return PyCodec_IncrementalDecoder(encoding, errors); +} + +static PyObject * +codec_stream_reader(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *NULL_WOULD_RAISE(encoding); // NULL case will be tested + PyObject *stream; + const char *errors; // can be NULL + if (!PyArg_ParseTuple(args, "zOz", &encoding, &stream, &errors)) { + return NULL; + } + return PyCodec_StreamReader(encoding, stream, errors); +} + +static PyObject * +codec_stream_writer(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *NULL_WOULD_RAISE(encoding); // NULL case will be tested + PyObject *stream; + const char *errors; // can be NULL + if (!PyArg_ParseTuple(args, "zOz", &encoding, &stream, &errors)) { + return NULL; + } + return PyCodec_StreamWriter(encoding, stream, errors); +} + +// === Codecs errors handlers ================================================= + +static PyObject * +codec_register_error(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *encoding; // must not be NULL + PyObject *error; + if (!PyArg_ParseTuple(args, "sO", &encoding, &error)) { + return NULL; + } + if (PyCodec_RegisterError(encoding, error) < 0) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject * +codec_lookup_error(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *NULL_WOULD_RAISE(encoding); // NULL case will be tested + if (!PyArg_ParseTuple(args, "z", &encoding)) { + return NULL; + } + return PyCodec_LookupError(encoding); +} + +static PyObject * +codec_strict_errors(PyObject *Py_UNUSED(module), PyObject *exc) +{ + assert(exc != NULL); + return 
PyCodec_StrictErrors(exc); +} + +static PyObject * +codec_ignore_errors(PyObject *Py_UNUSED(module), PyObject *exc) +{ + assert(exc != NULL); + return PyCodec_IgnoreErrors(exc); +} + +static PyObject * +codec_replace_errors(PyObject *Py_UNUSED(module), PyObject *exc) +{ + assert(exc != NULL); + return PyCodec_ReplaceErrors(exc); +} + +static PyObject * +codec_xmlcharrefreplace_errors(PyObject *Py_UNUSED(module), PyObject *exc) +{ + assert(exc != NULL); + return PyCodec_XMLCharRefReplaceErrors(exc); +} + +static PyObject * +codec_backslashreplace_errors(PyObject *Py_UNUSED(module), PyObject *exc) +{ + assert(exc != NULL); + return PyCodec_BackslashReplaceErrors(exc); +} static PyMethodDef test_methods[] = { - {NULL}, + /* codecs registration */ + {"codec_register", codec_register, METH_O}, + {"codec_unregister", codec_unregister, METH_O}, + {"codec_known_encoding", codec_known_encoding, METH_VARARGS}, + /* encoding and decoding interface */ + {"codec_encode", codec_encode, METH_VARARGS}, + {"codec_decode", codec_decode, METH_VARARGS}, + {"codec_encoder", codec_encoder, METH_VARARGS}, + {"codec_decoder", codec_decoder, METH_VARARGS}, + {"codec_incremental_encoder", codec_incremental_encoder, METH_VARARGS}, + {"codec_incremental_decoder", codec_incremental_decoder, METH_VARARGS}, + {"codec_stream_reader", codec_stream_reader, METH_VARARGS}, + {"codec_stream_writer", codec_stream_writer, METH_VARARGS}, + /* error handling */ + {"codec_register_error", codec_register_error, METH_VARARGS}, + {"codec_lookup_error", codec_lookup_error, METH_VARARGS}, + {"codec_strict_errors", codec_strict_errors, METH_O}, + {"codec_ignore_errors", codec_ignore_errors, METH_O}, + {"codec_replace_errors", codec_replace_errors, METH_O}, + {"codec_xmlcharrefreplace_errors", codec_xmlcharrefreplace_errors, METH_O}, + {"codec_backslashreplace_errors", codec_backslashreplace_errors, METH_O}, + // PyCodec_NameReplaceErrors() is tested in _testlimitedcapi/codec.c + {NULL, NULL, 0, NULL}, }; int _PyTestCapi_Init_Codec(PyObject *m) { - if (PyModule_AddFunctions(m, test_methods) < 0){ + if (PyModule_AddFunctions(m, test_methods) < 0) { return -1; } diff --git a/Modules/_testcapi/util.h b/Modules/_testcapi/util.h index f26d7656a10138..042e522542eddb 100644 --- a/Modules/_testcapi/util.h +++ b/Modules/_testcapi/util.h @@ -31,3 +31,13 @@ static const char uninitialized[] = "uninitialized"; #define UNINITIALIZED_SIZE ((Py_ssize_t)236892191) /* Marker to check that integer value was set. */ #define UNINITIALIZED_INT (63256717) +/* + * Marker to indicate that a NULL parameter would not be allowed + * at runtime but that the test interface will check that it is + * indeed the case. + * + * Use this macro only if passing NULL to the C API would raise + * a catchable exception (and not a fatal exception that would + * crash the interpreter). 
+ */ + #define NULL_WOULD_RAISE(NAME) NAME diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c index 0451688a46c75f..c403075fbb2501 100644 --- a/Modules/_testinternalcapi.c +++ b/Modules/_testinternalcapi.c @@ -681,13 +681,13 @@ set_eval_frame_default(PyObject *self, PyObject *Py_UNUSED(args)) static PyObject * record_eval(PyThreadState *tstate, struct _PyInterpreterFrame *f, int exc) { - if (PyFunction_Check(f->f_funcobj)) { + if (PyStackRef_FunctionCheck(f->f_funcobj)) { + PyFunctionObject *func = _PyFrame_GetFunction(f); PyObject *module = _get_current_module(); assert(module != NULL); module_state *state = get_module_state(module); Py_DECREF(module); - int res = PyList_Append(state->record_list, - ((PyFunctionObject *)f->f_funcobj)->func_name); + int res = PyList_Append(state->record_list, func->func_name); if (res < 0) { return NULL; } diff --git a/Modules/_testlimitedcapi.c b/Modules/_testlimitedcapi.c index e74cbfe19871bf..ba83a23117b2a5 100644 --- a/Modules/_testlimitedcapi.c +++ b/Modules/_testlimitedcapi.c @@ -38,6 +38,9 @@ PyInit__testlimitedcapi(void) if (_PyTestLimitedCAPI_Init_Bytes(mod) < 0) { return NULL; } + if (_PyTestLimitedCAPI_Init_Codec(mod) < 0) { + return NULL; + } if (_PyTestLimitedCAPI_Init_Complex(mod) < 0) { return NULL; } diff --git a/Modules/_testlimitedcapi/codec.c b/Modules/_testlimitedcapi/codec.c new file mode 100644 index 00000000000000..fdc18eedc2d288 --- /dev/null +++ b/Modules/_testlimitedcapi/codec.c @@ -0,0 +1,29 @@ +#include "pyconfig.h" // Py_GIL_DISABLED + +// Need limited C API version 3.5 for PyCodec_NameReplaceErrors() +#if !defined(Py_GIL_DISABLED) && !defined(Py_LIMITED_API) +# define Py_LIMITED_API 0x03050000 +#endif + +#include "parts.h" + +static PyObject * +codec_namereplace_errors(PyObject *Py_UNUSED(module), PyObject *exc) +{ + assert(exc != NULL); + return PyCodec_NameReplaceErrors(exc); +} + +static PyMethodDef test_methods[] = { + {"codec_namereplace_errors", codec_namereplace_errors, METH_O}, + {NULL}, +}; + +int +_PyTestLimitedCAPI_Init_Codec(PyObject *module) +{ + if (PyModule_AddFunctions(module, test_methods) < 0) { + return -1; + } + return 0; +} diff --git a/Modules/_testlimitedcapi/parts.h b/Modules/_testlimitedcapi/parts.h index 12b890853803f4..4107b150c5b4e0 100644 --- a/Modules/_testlimitedcapi/parts.h +++ b/Modules/_testlimitedcapi/parts.h @@ -25,6 +25,7 @@ int _PyTestLimitedCAPI_Init_Abstract(PyObject *module); int _PyTestLimitedCAPI_Init_ByteArray(PyObject *module); int _PyTestLimitedCAPI_Init_Bytes(PyObject *module); +int _PyTestLimitedCAPI_Init_Codec(PyObject *module); int _PyTestLimitedCAPI_Init_Complex(PyObject *module); int _PyTestLimitedCAPI_Init_Dict(PyObject *module); int _PyTestLimitedCAPI_Init_Eval(PyObject *module); diff --git a/Modules/clinic/_codecsmodule.c.h b/Modules/clinic/_codecsmodule.c.h index 1c0f37442ab350..01855aec5e123e 100644 --- a/Modules/clinic/_codecsmodule.c.h +++ b/Modules/clinic/_codecsmodule.c.h @@ -2683,6 +2683,56 @@ _codecs_register_error(PyObject *module, PyObject *const *args, Py_ssize_t nargs return return_value; } +PyDoc_STRVAR(_codecs__unregister_error__doc__, +"_unregister_error($module, errors, /)\n" +"--\n" +"\n" +"Un-register the specified error handler for the error handling `errors\'.\n" +"\n" +"Only custom error handlers can be un-registered. 
An exception is raised\n" +"if the error handling is a built-in one (e.g., \'strict\'), or if an error\n" +"occurs.\n" +"\n" +"Otherwise, this returns True if a custom handler has been successfully\n" +"un-registered, and False if no custom handler for the specified error\n" +"handling exists."); + +#define _CODECS__UNREGISTER_ERROR_METHODDEF \ + {"_unregister_error", (PyCFunction)_codecs__unregister_error, METH_O, _codecs__unregister_error__doc__}, + +static int +_codecs__unregister_error_impl(PyObject *module, const char *errors); + +static PyObject * +_codecs__unregister_error(PyObject *module, PyObject *arg) +{ + PyObject *return_value = NULL; + const char *errors; + int _return_value; + + if (!PyUnicode_Check(arg)) { + _PyArg_BadArgument("_unregister_error", "argument", "str", arg); + goto exit; + } + Py_ssize_t errors_length; + errors = PyUnicode_AsUTF8AndSize(arg, &errors_length); + if (errors == NULL) { + goto exit; + } + if (strlen(errors) != (size_t)errors_length) { + PyErr_SetString(PyExc_ValueError, "embedded null character"); + goto exit; + } + _return_value = _codecs__unregister_error_impl(module, errors); + if ((_return_value == -1) && PyErr_Occurred()) { + goto exit; + } + return_value = PyBool_FromLong((long)_return_value); + +exit: + return return_value; +} + PyDoc_STRVAR(_codecs_lookup_error__doc__, "lookup_error($module, name, /)\n" "--\n" @@ -2746,4 +2796,4 @@ _codecs_lookup_error(PyObject *module, PyObject *arg) #ifndef _CODECS_CODE_PAGE_ENCODE_METHODDEF #define _CODECS_CODE_PAGE_ENCODE_METHODDEF #endif /* !defined(_CODECS_CODE_PAGE_ENCODE_METHODDEF) */ -/*[clinic end generated code: output=e50d5fdf65bd45fa input=a9049054013a1b77]*/ +/*[clinic end generated code: output=b3013d4709d96ffe input=a9049054013a1b77]*/ diff --git a/Modules/clinic/posixmodule.c.h b/Modules/clinic/posixmodule.c.h index 9722f06a5935b9..749fe54598cc39 100644 --- a/Modules/clinic/posixmodule.c.h +++ b/Modules/clinic/posixmodule.c.h @@ -5954,7 +5954,7 @@ os_wait(PyObject *module, PyObject *Py_UNUSED(ignored)) #endif /* defined(HAVE_WAIT) */ -#if (defined(__linux__) && defined(__NR_pidfd_open)) +#if (defined(__linux__) && defined(__NR_pidfd_open) && !(defined(__ANDROID__) && __ANDROID_API__ < 31)) PyDoc_STRVAR(os_pidfd_open__doc__, "pidfd_open($module, /, pid, flags=0)\n" @@ -6013,7 +6013,7 @@ os_pidfd_open(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObjec return return_value; } -#endif /* (defined(__linux__) && defined(__NR_pidfd_open)) */ +#endif /* (defined(__linux__) && defined(__NR_pidfd_open) && !(defined(__ANDROID__) && __ANDROID_API__ < 31)) */ #if defined(HAVE_SETNS) @@ -12837,4 +12837,4 @@ os__create_environ(PyObject *module, PyObject *Py_UNUSED(ignored)) #ifndef OS__SUPPORTS_VIRTUAL_TERMINAL_METHODDEF #define OS__SUPPORTS_VIRTUAL_TERMINAL_METHODDEF #endif /* !defined(OS__SUPPORTS_VIRTUAL_TERMINAL_METHODDEF) */ -/*[clinic end generated code: output=a736ad3f7205176e input=a9049054013a1b77]*/ +/*[clinic end generated code: output=b93bbaaa8eb5b0ce input=a9049054013a1b77]*/ diff --git a/Modules/clinic/signalmodule.c.h b/Modules/clinic/signalmodule.c.h index 1d3a143dfd8d39..986c0289f2bfcb 100644 --- a/Modules/clinic/signalmodule.c.h +++ b/Modules/clinic/signalmodule.c.h @@ -670,7 +670,7 @@ signal_pthread_kill(PyObject *module, PyObject *const *args, Py_ssize_t nargs) #endif /* defined(HAVE_PTHREAD_KILL) */ -#if (defined(__linux__) && defined(__NR_pidfd_send_signal)) +#if (defined(__linux__) && defined(__NR_pidfd_send_signal) && !(defined(__ANDROID__) && __ANDROID_API__ < 31)) 
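Editorial note on the guard just above: since os.pidfd_open() and signal.pidfd_send_signal() are now compiled out on Android API levels below 31 (in addition to kernels lacking the syscall), portable callers are expected to feature-detect them. A minimal sketch, with signal number 0 chosen only as an illustrative "does the process exist" probe:

    import os
    import signal

    if hasattr(os, "pidfd_open") and hasattr(signal, "pidfd_send_signal"):
        fd = os.pidfd_open(os.getpid())        # pidfd for the current process
        try:
            signal.pidfd_send_signal(fd, 0)    # signal 0: existence/permission check only
        finally:
            os.close(fd)
    else:
        pass  # fall back to os.kill(), or skip the pidfd-based path entirely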
PyDoc_STRVAR(signal_pidfd_send_signal__doc__, "pidfd_send_signal($module, pidfd, signalnum, siginfo=None, flags=0, /)\n" @@ -723,7 +723,7 @@ signal_pidfd_send_signal(PyObject *module, PyObject *const *args, Py_ssize_t nar return return_value; } -#endif /* (defined(__linux__) && defined(__NR_pidfd_send_signal)) */ +#endif /* (defined(__linux__) && defined(__NR_pidfd_send_signal) && !(defined(__ANDROID__) && __ANDROID_API__ < 31)) */ #ifndef SIGNAL_ALARM_METHODDEF #define SIGNAL_ALARM_METHODDEF @@ -776,4 +776,4 @@ signal_pidfd_send_signal(PyObject *module, PyObject *const *args, Py_ssize_t nar #ifndef SIGNAL_PIDFD_SEND_SIGNAL_METHODDEF #define SIGNAL_PIDFD_SEND_SIGNAL_METHODDEF #endif /* !defined(SIGNAL_PIDFD_SEND_SIGNAL_METHODDEF) */ -/*[clinic end generated code: output=6d8e17a32cef668f input=a9049054013a1b77]*/ +/*[clinic end generated code: output=c57b4b98fad6f4b8 input=a9049054013a1b77]*/ diff --git a/Modules/itertoolsmodule.c b/Modules/itertoolsmodule.c index e740ec4d7625c3..1201fa094902d7 100644 --- a/Modules/itertoolsmodule.c +++ b/Modules/itertoolsmodule.c @@ -1036,7 +1036,7 @@ itertools_tee_impl(PyObject *module, PyObject *iterable, Py_ssize_t n) /*[clinic end generated code: output=1c64519cd859c2f0 input=c99a1472c425d66d]*/ { Py_ssize_t i; - PyObject *it, *copyable, *copyfunc, *result; + PyObject *it, *to, *result; if (n < 0) { PyErr_SetString(PyExc_ValueError, "n must be >= 0"); @@ -1053,41 +1053,23 @@ itertools_tee_impl(PyObject *module, PyObject *iterable, Py_ssize_t n) return NULL; } - if (PyObject_GetOptionalAttr(it, &_Py_ID(__copy__), &copyfunc) < 0) { - Py_DECREF(it); + itertools_state *state = get_module_state(module); + to = tee_fromiterable(state, it); + Py_DECREF(it); + if (to == NULL) { Py_DECREF(result); return NULL; } - if (copyfunc != NULL) { - copyable = it; - } - else { - itertools_state *state = get_module_state(module); - copyable = tee_fromiterable(state, it); - Py_DECREF(it); - if (copyable == NULL) { - Py_DECREF(result); - return NULL; - } - copyfunc = PyObject_GetAttr(copyable, &_Py_ID(__copy__)); - if (copyfunc == NULL) { - Py_DECREF(copyable); - Py_DECREF(result); - return NULL; - } - } - PyTuple_SET_ITEM(result, 0, copyable); + PyTuple_SET_ITEM(result, 0, to); for (i = 1; i < n; i++) { - copyable = _PyObject_CallNoArgs(copyfunc); - if (copyable == NULL) { - Py_DECREF(copyfunc); + to = tee_copy((teeobject *)to, NULL); + if (to == NULL) { Py_DECREF(result); return NULL; } - PyTuple_SET_ITEM(result, i, copyable); + PyTuple_SET_ITEM(result, i, to); } - Py_DECREF(copyfunc); return result; } diff --git a/Modules/mathmodule.c b/Modules/mathmodule.c index baf2dc439b8959..058f57770755aa 100644 --- a/Modules/mathmodule.c +++ b/Modules/mathmodule.c @@ -1657,7 +1657,7 @@ math_isqrt(PyObject *module, PyObject *n) /*[clinic end generated code: output=35a6f7f980beab26 input=5b6e7ae4fa6c43d6]*/ { int a_too_large, c_bit_length; - uint64_t c, d; + int64_t c, d; uint64_t m; uint32_t u; PyObject *a = NULL, *b; @@ -1680,14 +1680,13 @@ math_isqrt(PyObject *module, PyObject *n) /* c = (n.bit_length() - 1) // 2 */ c = _PyLong_NumBits(n); - if (c == (uint64_t)(-1)) { - goto error; - } - c = (c - 1U) / 2U; + assert(c > 0); + assert(!PyErr_Occurred()); + c = (c - 1) / 2; /* Fast path: if c <= 31 then n < 2**64 and we can compute directly with a fast, almost branch-free algorithm.
*/ - if (c <= 31U) { + if (c <= 31) { int shift = 31 - (int)c; m = (uint64_t)PyLong_AsUnsignedLongLong(n); Py_DECREF(n); @@ -1704,13 +1703,13 @@ math_isqrt(PyObject *module, PyObject *n) /* From n >= 2**64 it follows that c.bit_length() >= 6. */ c_bit_length = 6; - while ((c >> c_bit_length) > 0U) { + while ((c >> c_bit_length) > 0) { ++c_bit_length; } /* Initialise d and a. */ d = c >> (c_bit_length - 5); - b = _PyLong_Rshift(n, 2U*c - 62U); + b = _PyLong_Rshift(n, 2*c - 62); if (b == NULL) { goto error; } @@ -1727,12 +1726,12 @@ math_isqrt(PyObject *module, PyObject *n) for (int s = c_bit_length - 6; s >= 0; --s) { PyObject *q; - uint64_t e = d; + int64_t e = d; d = c >> s; /* q = (n >> 2*c - e - d + 1) // a */ - q = _PyLong_Rshift(n, 2U*c - d - e + 1U); + q = _PyLong_Rshift(n, 2*c - d - e + 1); if (q == NULL) { goto error; } @@ -1742,7 +1741,7 @@ math_isqrt(PyObject *module, PyObject *n) } /* a = (a << d - 1 - e) + q */ - Py_SETREF(a, _PyLong_Lshift(a, d - 1U - e)); + Py_SETREF(a, _PyLong_Lshift(a, d - 1 - e)); if (a == NULL) { Py_DECREF(q); goto error; @@ -2202,8 +2201,8 @@ loghelper(PyObject* arg, double (*func)(double)) to compute the log anyway. Clear the exception and continue. */ PyErr_Clear(); x = _PyLong_Frexp((PyLongObject *)arg, &e); - if (x == -1.0 && PyErr_Occurred()) - return NULL; + assert(e >= 0); + assert(!PyErr_Occurred()); /* Value is ~= x * 2**e, so the log ~= log(x) + log(2) * e. */ result = func(x) + func(2.0) * e; } diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index 86366c66c46552..334350285f3b6f 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -10121,7 +10121,10 @@ os_wait_impl(PyObject *module) } #endif /* HAVE_WAIT */ -#if defined(__linux__) && defined(__NR_pidfd_open) + +// This system call always crashes on older Android versions. +#if defined(__linux__) && defined(__NR_pidfd_open) && \ + !(defined(__ANDROID__) && __ANDROID_API__ < 31) /*[clinic input] os.pidfd_open pid: pid_t diff --git a/Modules/signalmodule.c b/Modules/signalmodule.c index 73bfcb756657b8..0e53a36bca55f0 100644 --- a/Modules/signalmodule.c +++ b/Modules/signalmodule.c @@ -1299,7 +1299,9 @@ signal_pthread_kill_impl(PyObject *module, unsigned long thread_id, #endif /* #if defined(HAVE_PTHREAD_KILL) */ -#if defined(__linux__) && defined(__NR_pidfd_send_signal) +// This system call always crashes on older Android versions. +#if defined(__linux__) && defined(__NR_pidfd_send_signal) && \ + !(defined(__ANDROID__) && __ANDROID_API__ < 31) /*[clinic input] signal.pidfd_send_signal diff --git a/Modules/timemodule.c b/Modules/timemodule.c index ee59fb73ac1e31..9720c201a184a8 100644 --- a/Modules/timemodule.c +++ b/Modules/timemodule.c @@ -813,7 +813,12 @@ time_strftime(PyObject *module, PyObject *args) return NULL; } -#if defined(_MSC_VER) || (defined(__sun) && defined(__SVR4)) || defined(_AIX) || defined(__VXWORKS__) +// Some platforms only support a limited range of years. +// +// Android works with negative years on the emulator, but fails on some +// physical devices (#123017). 
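Editorial note: the Python-visible effect of widening this guard is that out-of-range years raise ValueError up front on the listed platforms instead of being passed to the C library's strftime(). A rough sketch; on unguarded platforms (for example glibc-based Linux) the same call is simply handed to the platform strftime(), so the output is platform-dependent:

    import time

    st = (0, 1, 1, 0, 0, 0, 0, 1, -1)   # struct_time-style tuple with year 0
    try:
        print(time.strftime("%Y", st))
    except ValueError as exc:
        print(exc)   # "strftime() requires year in [1; 9999]" on guarded platforms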
+#if defined(_MSC_VER) || (defined(__sun) && defined(__SVR4)) || defined(_AIX) \ + || defined(__VXWORKS__) || defined(__ANDROID__) if (buf.tm_year + 1900 < 1 || 9999 < buf.tm_year + 1900) { PyErr_SetString(PyExc_ValueError, "strftime() requires year in [1; 9999]"); diff --git a/Objects/capsule.c b/Objects/capsule.c index 555979dab2b789..28965e0f21b7a0 100644 --- a/Objects/capsule.c +++ b/Objects/capsule.c @@ -317,10 +317,14 @@ static int capsule_traverse(PyCapsule *capsule, visitproc visit, void *arg) { // Capsule object is only tracked by the GC - // if _PyCapsule_SetTraverse() is called - assert(capsule->traverse_func != NULL); + // if _PyCapsule_SetTraverse() is called, but + // this can still be manually triggered by gc.get_referents() + + if (capsule->traverse_func != NULL) { + return capsule->traverse_func((PyObject*)capsule, visit, arg); + } - return capsule->traverse_func((PyObject*)capsule, visit, arg); + return 0; } diff --git a/Objects/dictobject.c b/Objects/dictobject.c index f38ab1b2865e99..ef9d23e62f95de 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -1488,7 +1488,7 @@ _Py_dict_lookup_threadsafe(PyDictObject *mp, PyObject *key, Py_hash_t hash, PyOb *value_addr = value; if (value != NULL) { assert(ix >= 0); - Py_INCREF(value); + _Py_NewRefWithLock(value); } Py_END_CRITICAL_SECTION(); return ix; @@ -3930,13 +3930,13 @@ dict_copy_impl(PyDictObject *self) } /* Copies the values, but does not change the reference - * counts of the objects in the array. */ + * counts of the objects in the array. + * Return NULL, but does *not* set an exception on failure */ static PyDictValues * copy_values(PyDictValues *values) { PyDictValues *newvalues = new_values(values->capacity); if (newvalues == NULL) { - PyErr_NoMemory(); return NULL; } newvalues->size = values->size; @@ -7015,7 +7015,7 @@ _PyObject_TryGetInstanceAttribute(PyObject *obj, PyObject *name, PyObject **attr // Still no dict, we can read from the values assert(values->valid); value = values->values[ix]; - *attr = Py_XNewRef(value); + *attr = _Py_XNewRefWithLock(value); success = true; } @@ -7035,7 +7035,7 @@ _PyObject_TryGetInstanceAttribute(PyObject *obj, PyObject *name, PyObject **attr if (dict->ma_values == values && FT_ATOMIC_LOAD_UINT8(values->valid)) { value = _Py_atomic_load_ptr_relaxed(&values->values[ix]); - *attr = Py_XNewRef(value); + *attr = _Py_XNewRefWithLock(value); success = true; } else { // Caller needs to lookup from the dictionary @@ -7216,6 +7216,13 @@ _PyDict_DetachFromObject(PyDictObject *mp, PyObject *obj) PyDictValues *values = copy_values(mp->ma_values); if (values == NULL) { + /* Out of memory. 
Clear the dict */ + PyInterpreterState *interp = _PyInterpreterState_GET(); + PyDictKeysObject *oldkeys = mp->ma_keys; + set_keys(mp, Py_EMPTY_KEYS); + dictkeys_decref(interp, oldkeys, IS_DICT_SHARED(mp)); + STORE_USED(mp, 0); + PyErr_NoMemory(); return -1; } mp->ma_values = values; diff --git a/Objects/exceptions.c b/Objects/exceptions.c index fda62f159c1540..b3910855165494 100644 --- a/Objects/exceptions.c +++ b/Objects/exceptions.c @@ -3387,8 +3387,9 @@ _PyErr_NoMemory(PyThreadState *tstate) } static void -MemoryError_dealloc(PyBaseExceptionObject *self) +MemoryError_dealloc(PyObject *obj) { + PyBaseExceptionObject *self = (PyBaseExceptionObject *)obj; _PyObject_GC_UNTRACK(self); BaseException_clear(self); @@ -3447,7 +3448,7 @@ PyTypeObject _PyExc_MemoryError = { PyVarObject_HEAD_INIT(NULL, 0) "MemoryError", sizeof(PyBaseExceptionObject), - 0, (destructor)MemoryError_dealloc, 0, 0, 0, 0, 0, 0, 0, + 0, MemoryError_dealloc, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, PyDoc_STR("Out of memory."), (traverseproc)BaseException_traverse, diff --git a/Objects/floatobject.c b/Objects/floatobject.c index dc3d8a3e5d0f4b..a48a210adee3b9 100644 --- a/Objects/floatobject.c +++ b/Objects/floatobject.c @@ -406,19 +406,16 @@ float_richcompare(PyObject *v, PyObject *w, int op) } /* The signs are the same. */ /* Convert w to a double if it fits. In particular, 0 fits. */ - uint64_t nbits64 = _PyLong_NumBits(w); - if (nbits64 > (unsigned int)DBL_MAX_EXP) { + int64_t nbits64 = _PyLong_NumBits(w); + assert(nbits64 >= 0); + assert(!PyErr_Occurred()); + if (nbits64 > DBL_MAX_EXP) { /* This Python integer is larger than any finite C double. * Replace with little doubles * that give the same outcome -- w is so large that * its magnitude must exceed the magnitude of any * finite float. */ - if (nbits64 == (uint64_t)-1 && PyErr_Occurred()) { - /* This Python integer is so large that uint64_t isn't - * big enough to hold the # of bits. 
*/ - PyErr_Clear(); - } i = (double)vsign; assert(wsign != 0); j = wsign * 2.0; diff --git a/Objects/frameobject.c b/Objects/frameobject.c index b567327f970836..f3a66ffc9aac8f 100644 --- a/Objects/frameobject.c +++ b/Objects/frameobject.c @@ -310,14 +310,31 @@ framelocalsproxy_dealloc(PyObject *self) static PyObject * framelocalsproxy_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { + if (PyTuple_GET_SIZE(args) != 1) { + PyErr_Format(PyExc_TypeError, + "FrameLocalsProxy expected 1 argument, got %zd", + PyTuple_GET_SIZE(args)); + return NULL; + } + PyObject *item = PyTuple_GET_ITEM(args, 0); + + if (!PyFrame_Check(item)) { + PyErr_Format(PyExc_TypeError, "expect frame, not %T", item); + return NULL; + } + PyFrameObject *frame = (PyFrameObject*)item; + + if (kwds != NULL && PyDict_Size(kwds) != 0) { + PyErr_SetString(PyExc_TypeError, + "FrameLocalsProxy takes no keyword arguments"); + return 0; + } + PyFrameLocalsProxyObject *self = (PyFrameLocalsProxyObject *)type->tp_alloc(type, 0); if (self == NULL) { return NULL; } - PyFrameObject *frame = (PyFrameObject*)PyTuple_GET_ITEM(args, 0); - assert(PyFrame_Check(frame)); - ((PyFrameLocalsProxyObject*)self)->frame = (PyFrameObject*)Py_NewRef(frame); return (PyObject *)self; @@ -1634,7 +1651,7 @@ frame_dealloc(PyFrameObject *f) /* Kill all local variables including specials, if we own them */ if (f->f_frame == frame && frame->owner == FRAME_OWNED_BY_FRAME_OBJECT) { PyStackRef_CLEAR(frame->f_executable); - Py_CLEAR(frame->f_funcobj); + PyStackRef_CLEAR(frame->f_funcobj); Py_CLEAR(frame->f_locals); _PyStackRef *locals = _PyFrame_GetLocalsArray(frame); _PyStackRef *sp = frame->stackpointer; @@ -1790,7 +1807,7 @@ static void init_frame(_PyInterpreterFrame *frame, PyFunctionObject *func, PyObject *locals) { PyCodeObject *code = (PyCodeObject *)func->func_code; - _PyFrame_Initialize(frame, (PyFunctionObject*)Py_NewRef(func), + _PyFrame_Initialize(frame, PyStackRef_FromPyObjectNew(func), Py_XNewRef(locals), code, 0, NULL); } @@ -1861,14 +1878,15 @@ frame_init_get_vars(_PyInterpreterFrame *frame) PyCodeObject *co = _PyFrame_GetCode(frame); int lasti = _PyInterpreterFrame_LASTI(frame); if (!(lasti < 0 && _PyCode_CODE(co)->op.code == COPY_FREE_VARS - && PyFunction_Check(frame->f_funcobj))) + && PyStackRef_FunctionCheck(frame->f_funcobj))) { /* Free vars are initialized */ return; } /* Free vars have not been initialized -- Do that */ - PyObject *closure = ((PyFunctionObject *)frame->f_funcobj)->func_closure; + PyFunctionObject *func = _PyFrame_GetFunction(frame); + PyObject *closure = func->func_closure; int offset = PyUnstable_Code_GetFirstFree(co); for (int i = 0; i < co->co_nfreevars; ++i) { PyObject *o = PyTuple_GET_ITEM(closure, i); diff --git a/Objects/genobject.c b/Objects/genobject.c index 5dc8f926557b52..41cf8fdcc9dee8 100644 --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -58,10 +58,7 @@ gen_traverse(PyGenObject *gen, visitproc visit, void *arg) else { // We still need to visit the code object when the frame is cleared to // ensure that it's kept alive if the reference is deferred. - int err = _PyGC_VisitStackRef(&gen->gi_iframe.f_executable, visit, arg); - if (err) { - return err; - } + _Py_VISIT_STACKREF(gen->gi_iframe.f_executable); } /* No need to visit cr_origin, because it's just tuples/str/int, so can't participate in a reference cycle. 
*/ diff --git a/Objects/longobject.c b/Objects/longobject.c index d34c8b6d71ab3f..9beb5884a6932b 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -133,8 +133,16 @@ long_normalize(PyLongObject *v) /* Allocate a new int object with size digits. Return NULL and set exception if we run out of memory. */ -#define MAX_LONG_DIGITS \ +#if SIZEOF_SIZE_T < 8 +# define MAX_LONG_DIGITS \ ((PY_SSIZE_T_MAX - offsetof(PyLongObject, long_value.ob_digit))/sizeof(digit)) +#else +/* Guarantee that the number of bits fits in int64_t. + This is more than an exbibyte, that is more than many of modern + architectures support in principle. + -1 is added to avoid overflow in _PyLong_Frexp(). */ +# define MAX_LONG_DIGITS ((INT64_MAX-1) / PyLong_SHIFT) +#endif PyLongObject * _PyLong_New(Py_ssize_t size) @@ -804,11 +812,11 @@ bit_length_digit(digit x) return _Py_bit_length((unsigned long)x); } -uint64_t +int64_t _PyLong_NumBits(PyObject *vv) { PyLongObject *v = (PyLongObject *)vv; - uint64_t result = 0; + int64_t result = 0; Py_ssize_t ndigits; int msd_bits; @@ -818,21 +826,12 @@ _PyLong_NumBits(PyObject *vv) assert(ndigits == 0 || v->long_value.ob_digit[ndigits - 1] != 0); if (ndigits > 0) { digit msd = v->long_value.ob_digit[ndigits - 1]; - if ((uint64_t)(ndigits - 1) > UINT64_MAX / (uint64_t)PyLong_SHIFT) - goto Overflow; - result = (uint64_t)(ndigits - 1) * (uint64_t)PyLong_SHIFT; + assert(ndigits <= INT64_MAX / PyLong_SHIFT); + result = (int64_t)(ndigits - 1) * PyLong_SHIFT; msd_bits = bit_length_digit(msd); - if (UINT64_MAX - msd_bits < result) - goto Overflow; result += msd_bits; } return result; - - Overflow: - /* Very unlikely. Such integer would require more than 2 exbibytes of RAM. */ - PyErr_SetString(PyExc_OverflowError, "int has too many bits " - "to express in a 64-bit integer"); - return (uint64_t)-1; } PyObject * @@ -1247,15 +1246,12 @@ PyLong_AsNativeBytes(PyObject* vv, void* buffer, Py_ssize_t n, int flags) /* Calculates the number of bits required for the *absolute* value * of v. This does not take sign into account, only magnitude. */ - uint64_t nb = _PyLong_NumBits((PyObject *)v); - if (nb == (uint64_t)-1) { - res = -1; - } else { - /* Normally this would be((nb - 1) / 8) + 1 to avoid rounding up - * multiples of 8 to the next byte, but we add an implied bit for - * the sign and it cancels out. */ - res = (Py_ssize_t)(nb / 8) + 1; - } + int64_t nb = _PyLong_NumBits((PyObject *)v); + assert(nb >= 0); + /* Normally this would be ((nb - 1) / 8) + 1 to avoid rounding up + * multiples of 8 to the next byte, but we add an implied bit for + * the sign and it cancels out. */ + res = (Py_ssize_t)(nb / 8) + 1; /* Two edge cases exist that are best handled after extracting the * bits. These may result in us reporting overflow when the value @@ -3415,7 +3411,8 @@ x_divrem(PyLongObject *v1, PyLongObject *w1, PyLongObject **prem) double _PyLong_Frexp(PyLongObject *a, int64_t *e) { - Py_ssize_t a_size, shift_digits, shift_bits, x_size; + Py_ssize_t a_size, shift_digits, x_size; + int shift_bits; int64_t a_bits; /* See below for why x_digits is always large enough. */ digit rem; @@ -3432,14 +3429,7 @@ _PyLong_Frexp(PyLongObject *a, int64_t *e) *e = 0; return 0.0; } - int msd_bits = bit_length_digit(a->long_value.ob_digit[a_size-1]); - /* The following is an overflow-free version of the check - "if ((a_size - 1) * PyLong_SHIFT + msd_bits > PY_SSIZE_T_MAX) ..." 
*/ - if (a_size >= (INT64_MAX - 1) / PyLong_SHIFT + 1 && - (a_size > (INT64_MAX - 1) / PyLong_SHIFT + 1 || - msd_bits > (INT64_MAX - 1) % PyLong_SHIFT + 1)) - goto overflow; - a_bits = (int64_t)(a_size - 1) * PyLong_SHIFT + msd_bits; + a_bits = _PyLong_NumBits((PyObject *)a); /* Shift the first DBL_MANT_DIG + 2 bits of a into x_digits[0:x_size] (shifting left if a_bits <= DBL_MANT_DIG + 2). @@ -3468,18 +3458,18 @@ _PyLong_Frexp(PyLongObject *a, int64_t *e) */ if (a_bits <= DBL_MANT_DIG + 2) { shift_digits = (DBL_MANT_DIG + 2 - (Py_ssize_t)a_bits) / PyLong_SHIFT; - shift_bits = (DBL_MANT_DIG + 2 - (Py_ssize_t)a_bits) % PyLong_SHIFT; + shift_bits = (DBL_MANT_DIG + 2 - (int)a_bits) % PyLong_SHIFT; x_size = shift_digits; rem = v_lshift(x_digits + x_size, a->long_value.ob_digit, a_size, - (int)shift_bits); + shift_bits); x_size += a_size; x_digits[x_size++] = rem; } else { shift_digits = (Py_ssize_t)((a_bits - DBL_MANT_DIG - 2) / PyLong_SHIFT); - shift_bits = (Py_ssize_t)((a_bits - DBL_MANT_DIG - 2) % PyLong_SHIFT); + shift_bits = (int)((a_bits - DBL_MANT_DIG - 2) % PyLong_SHIFT); rem = v_rshift(x_digits, a->long_value.ob_digit + shift_digits, - a_size - shift_digits, (int)shift_bits); + a_size - shift_digits, shift_bits); x_size = a_size - shift_digits; /* For correct rounding below, we need the least significant bit of x to be 'sticky' for this shift: if any of the bits @@ -3505,21 +3495,13 @@ _PyLong_Frexp(PyLongObject *a, int64_t *e) /* Rescale; make correction if result is 1.0. */ dx /= 4.0 * EXP2_DBL_MANT_DIG; if (dx == 1.0) { - if (a_bits == INT64_MAX) - goto overflow; + assert(a_bits < INT64_MAX); dx = 0.5; a_bits += 1; } *e = a_bits; return _PyLong_IsNegative(a) ? -dx : dx; - - overflow: - /* exponent > PY_SSIZE_T_MAX */ - PyErr_SetString(PyExc_OverflowError, - "huge integer: number of bits overflows a Py_ssize_t"); - *e = 0; - return -1.0; } /* Get a C double from an int object. Rounds to the nearest double, @@ -3547,7 +3529,9 @@ PyLong_AsDouble(PyObject *v) return (double)medium_value((PyLongObject *)v); } x = _PyLong_Frexp((PyLongObject *)v, &exponent); - if ((x == -1.0 && PyErr_Occurred()) || exponent > DBL_MAX_EXP) { + assert(exponent >= 0); + assert(!PyErr_Occurred()); + if (exponent > DBL_MAX_EXP) { PyErr_SetString(PyExc_OverflowError, "int too large to convert to float"); return -1.0; @@ -5217,39 +5201,6 @@ long_bool(PyLongObject *v) return !_PyLong_IsZero(v); } -/* wordshift, remshift = divmod(shiftby, PyLong_SHIFT) */ -static int -divmod_shift(PyObject *shiftby, Py_ssize_t *wordshift, digit *remshift) -{ - assert(PyLong_Check(shiftby)); - assert(!_PyLong_IsNegative((PyLongObject *)shiftby)); - Py_ssize_t lshiftby = PyLong_AsSsize_t((PyObject *)shiftby); - if (lshiftby >= 0) { - *wordshift = lshiftby / PyLong_SHIFT; - *remshift = lshiftby % PyLong_SHIFT; - return 0; - } - /* PyLong_Check(shiftby) is true and shiftby is not negative, so it must - be that PyLong_AsSsize_t raised an OverflowError. */ - assert(PyErr_ExceptionMatches(PyExc_OverflowError)); - PyErr_Clear(); - PyLongObject *wordshift_obj = divrem1((PyLongObject *)shiftby, PyLong_SHIFT, remshift); - if (wordshift_obj == NULL) { - return -1; - } - *wordshift = PyLong_AsSsize_t((PyObject *)wordshift_obj); - Py_DECREF(wordshift_obj); - if (*wordshift >= 0 && *wordshift < PY_SSIZE_T_MAX / (Py_ssize_t)sizeof(digit)) { - return 0; - } - PyErr_Clear(); - /* Clip the value. With such large wordshift the right shift - returns 0 and the left shift raises an error in _PyLong_New(). 
*/ - *wordshift = PY_SSIZE_T_MAX / sizeof(digit); - *remshift = 0; - return 0; -} - /* Inner function for both long_rshift and _PyLong_Rshift, shifting an integer right by PyLong_SHIFT*wordshift + remshift bits. wordshift should be nonnegative. */ @@ -5343,8 +5294,7 @@ long_rshift1(PyLongObject *a, Py_ssize_t wordshift, digit remshift) static PyObject * long_rshift(PyObject *a, PyObject *b) { - Py_ssize_t wordshift; - digit remshift; + int64_t shiftby; CHECK_BINOP(a, b); @@ -5355,24 +5305,35 @@ long_rshift(PyObject *a, PyObject *b) if (_PyLong_IsZero((PyLongObject *)a)) { return PyLong_FromLong(0); } - if (divmod_shift(b, &wordshift, &remshift) < 0) - return NULL; - return long_rshift1((PyLongObject *)a, wordshift, remshift); + if (PyLong_AsInt64(b, &shiftby) < 0) { + if (!PyErr_ExceptionMatches(PyExc_OverflowError)) { + return NULL; + } + PyErr_Clear(); + if (_PyLong_IsNegative((PyLongObject *)a)) { + return PyLong_FromLong(-1); + } + else { + return PyLong_FromLong(0); + } + } + return _PyLong_Rshift(a, shiftby); } /* Return a >> shiftby. */ PyObject * -_PyLong_Rshift(PyObject *a, uint64_t shiftby) +_PyLong_Rshift(PyObject *a, int64_t shiftby) { Py_ssize_t wordshift; digit remshift; assert(PyLong_Check(a)); + assert(shiftby >= 0); if (_PyLong_IsZero((PyLongObject *)a)) { return PyLong_FromLong(0); } -#if PY_SSIZE_T_MAX <= UINT64_MAX / PyLong_SHIFT - if (shiftby > (uint64_t)PY_SSIZE_T_MAX * PyLong_SHIFT) { +#if PY_SSIZE_T_MAX <= INT64_MAX / PyLong_SHIFT + if (shiftby > (int64_t)PY_SSIZE_T_MAX * PyLong_SHIFT) { if (_PyLong_IsNegative((PyLongObject *)a)) { return PyLong_FromLong(-1); } @@ -5430,8 +5391,7 @@ long_lshift1(PyLongObject *a, Py_ssize_t wordshift, digit remshift) static PyObject * long_lshift(PyObject *a, PyObject *b) { - Py_ssize_t wordshift; - digit remshift; + int64_t shiftby; CHECK_BINOP(a, b); @@ -5442,24 +5402,30 @@ long_lshift(PyObject *a, PyObject *b) if (_PyLong_IsZero((PyLongObject *)a)) { return PyLong_FromLong(0); } - if (divmod_shift(b, &wordshift, &remshift) < 0) + if (PyLong_AsInt64(b, &shiftby) < 0) { + if (PyErr_ExceptionMatches(PyExc_OverflowError)) { + PyErr_SetString(PyExc_OverflowError, + "too many digits in integer"); + } return NULL; - return long_lshift1((PyLongObject *)a, wordshift, remshift); + } + return _PyLong_Lshift(a, shiftby); } /* Return a << shiftby. 
*/ PyObject * -_PyLong_Lshift(PyObject *a, uint64_t shiftby) +_PyLong_Lshift(PyObject *a, int64_t shiftby) { Py_ssize_t wordshift; digit remshift; assert(PyLong_Check(a)); + assert(shiftby >= 0); if (_PyLong_IsZero((PyLongObject *)a)) { return PyLong_FromLong(0); } -#if PY_SSIZE_T_MAX <= UINT64_MAX / PyLong_SHIFT - if (shiftby > (uint64_t)PY_SSIZE_T_MAX * PyLong_SHIFT) { +#if PY_SSIZE_T_MAX <= INT64_MAX / PyLong_SHIFT + if (shiftby > (int64_t)PY_SSIZE_T_MAX * PyLong_SHIFT) { PyErr_SetString(PyExc_OverflowError, "too many digits in integer"); return NULL; @@ -6213,11 +6179,10 @@ static PyObject * int_bit_length_impl(PyObject *self) /*[clinic end generated code: output=fc1977c9353d6a59 input=e4eb7a587e849a32]*/ { - uint64_t nbits = _PyLong_NumBits(self); - if (nbits == (uint64_t)-1) { - return NULL; - } - return PyLong_FromUnsignedLongLong(nbits); + int64_t nbits = _PyLong_NumBits(self); + assert(nbits >= 0); + assert(!PyErr_Occurred()); + return PyLong_FromInt64(nbits); } static int @@ -6251,40 +6216,13 @@ int_bit_count_impl(PyObject *self) PyLongObject *z = (PyLongObject *)self; Py_ssize_t ndigits = _PyLong_DigitCount(z); - Py_ssize_t bit_count = 0; + int64_t bit_count = 0; - /* Each digit has up to PyLong_SHIFT ones, so the accumulated bit count - from the first PY_SSIZE_T_MAX/PyLong_SHIFT digits can't overflow a - Py_ssize_t. */ - Py_ssize_t ndigits_fast = Py_MIN(ndigits, PY_SSIZE_T_MAX/PyLong_SHIFT); - for (Py_ssize_t i = 0; i < ndigits_fast; i++) { + for (Py_ssize_t i = 0; i < ndigits; i++) { bit_count += popcount_digit(z->long_value.ob_digit[i]); } - PyObject *result = PyLong_FromSsize_t(bit_count); - if (result == NULL) { - return NULL; - } - - /* Use Python integers if bit_count would overflow. */ - for (Py_ssize_t i = ndigits_fast; i < ndigits; i++) { - PyObject *x = PyLong_FromLong(popcount_digit(z->long_value.ob_digit[i])); - if (x == NULL) { - goto error; - } - PyObject *y = long_add((PyLongObject *)result, (PyLongObject *)x); - Py_DECREF(x); - if (y == NULL) { - goto error; - } - Py_SETREF(result, y); - } - - return result; - - error: - Py_DECREF(result); - return NULL; + return PyLong_FromInt64(bit_count); } /*[clinic input] diff --git a/Objects/rangeobject.c b/Objects/rangeobject.c index 1318ce0319d438..2942ab624edf72 100644 --- a/Objects/rangeobject.c +++ b/Objects/rangeobject.c @@ -143,14 +143,14 @@ range_new(PyTypeObject *type, PyObject *args, PyObject *kw) static PyObject * -range_vectorcall(PyTypeObject *type, PyObject *const *args, +range_vectorcall(PyObject *rangetype, PyObject *const *args, size_t nargsf, PyObject *kwnames) { Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); if (!_PyArg_NoKwnames("range", kwnames)) { return NULL; } - return range_from_array(type, args, nargs); + return range_from_array((PyTypeObject *)rangetype, args, nargs); } PyDoc_STRVAR(range_doc, @@ -803,7 +803,7 @@ PyTypeObject PyRange_Type = { 0, /* tp_init */ 0, /* tp_alloc */ range_new, /* tp_new */ - .tp_vectorcall = (vectorcallfunc)range_vectorcall + .tp_vectorcall = range_vectorcall }; /*********************** range Iterator **************************/ diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c index f14f10ab9c0a46..4d8cca68df946a 100644 --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -999,8 +999,9 @@ tupleiter_traverse(_PyTupleIterObject *it, visitproc visit, void *arg) } static PyObject * -tupleiter_next(_PyTupleIterObject *it) +tupleiter_next(PyObject *obj) { + _PyTupleIterObject *it = (_PyTupleIterObject *)obj; PyTupleObject *seq; PyObject *item; @@ -1101,7 
+1102,7 @@ PyTypeObject PyTupleIter_Type = { 0, /* tp_richcompare */ 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter */ - (iternextfunc)tupleiter_next, /* tp_iternext */ + tupleiter_next, /* tp_iternext */ tupleiter_methods, /* tp_methods */ 0, }; diff --git a/Objects/typeobject.c b/Objects/typeobject.c index 68e481f8e5163b..0e2d9758a5ffae 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -1435,6 +1435,9 @@ type_set_module(PyTypeObject *type, PyObject *value, void *context) PyType_Modified(type); PyObject *dict = lookup_tp_dict(type); + if (PyDict_Pop(dict, &_Py_ID(__firstlineno__), NULL) < 0) { + return -1; + } return PyDict_SetItem(dict, &_Py_ID(__module__), value); } @@ -5207,8 +5210,8 @@ PyType_GetModuleState(PyTypeObject *type) /* Get the module of the first superclass where the module has the * given PyModuleDef. */ -static inline PyObject * -get_module_by_def(PyTypeObject *type, PyModuleDef *def) +PyObject * +PyType_GetModuleByDef(PyTypeObject *type, PyModuleDef *def) { assert(PyType_Check(type)); @@ -5241,7 +5244,7 @@ get_module_by_def(PyTypeObject *type, PyModuleDef *def) Py_ssize_t n = PyTuple_GET_SIZE(mro); for (Py_ssize_t i = 1; i < n; i++) { PyObject *super = PyTuple_GET_ITEM(mro, i); - if(!_PyType_HasFeature((PyTypeObject *)super, Py_TPFLAGS_HEAPTYPE)) { + if (!_PyType_HasFeature((PyTypeObject *)super, Py_TPFLAGS_HEAPTYPE)) { // Static types in the MRO need to be skipped continue; } @@ -5254,37 +5257,14 @@ get_module_by_def(PyTypeObject *type, PyModuleDef *def) } } END_TYPE_LOCK(); - return res; -} -PyObject * -PyType_GetModuleByDef(PyTypeObject *type, PyModuleDef *def) -{ - PyObject *module = get_module_by_def(type, def); - if (module == NULL) { + if (res == NULL) { PyErr_Format( PyExc_TypeError, "PyType_GetModuleByDef: No superclass of '%s' has the given module", type->tp_name); } - return module; -} - -PyObject * -_PyType_GetModuleByDef2(PyTypeObject *left, PyTypeObject *right, - PyModuleDef *def) -{ - PyObject *module = get_module_by_def(left, def); - if (module == NULL) { - module = get_module_by_def(right, def); - if (module == NULL) { - PyErr_Format( - PyExc_TypeError, - "PyType_GetModuleByDef: No superclass of '%s' nor '%s' has " - "the given module", left->tp_name, right->tp_name); - } - } - return module; + return res; } diff --git a/Objects/typevarobject.c b/Objects/typevarobject.c index 3c96850589d378..51d93ed8b5ba8c 100644 --- a/Objects/typevarobject.c +++ b/Objects/typevarobject.c @@ -151,7 +151,7 @@ constevaluator_clear(PyObject *self) } static PyObject * -constevaluator_repr(PyObject *self, PyObject *repr) +constevaluator_repr(PyObject *self) { PyObject *value = ((constevaluatorobject *)self)->value; return PyUnicode_FromFormat("<constevaluator %R>", value); } @@ -168,7 +168,7 @@ constevaluator_call(PyObject *self, PyObject *args, PyObject *kwargs) return NULL; } PyObject *value = ((constevaluatorobject *)self)->value; - if (format == 3) { // SOURCE + if (format == 3) { // STRING PyUnicodeWriter *writer = PyUnicodeWriter_Create(5); // cannot be <5 if (writer == NULL) { return NULL; } @@ -242,7 +242,8 @@ static PyType_Slot constevaluator_slots[] = { PyType_Spec constevaluator_spec = { .name = "_typing._ConstEvaluator", .basicsize = sizeof(constevaluatorobject), - .flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_IMMUTABLETYPE, + .flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_IMMUTABLETYPE + | Py_TPFLAGS_DISALLOW_INSTANTIATION, .slots = constevaluator_slots, }; @@ -372,10 +373,10 @@ caller(void) if (f == NULL) { Py_RETURN_NONE; } - if
(f == NULL || f->f_funcobj == NULL) { + if (f == NULL || PyStackRef_IsNull(f->f_funcobj)) { Py_RETURN_NONE; } - PyObject *r = PyFunction_GetModule(f->f_funcobj); + PyObject *r = PyFunction_GetModule(PyStackRef_AsPyObjectBorrow(f->f_funcobj)); if (!r) { PyErr_Clear(); Py_RETURN_NONE; @@ -1914,7 +1915,16 @@ typealias_alloc(PyObject *name, PyObject *type_params, PyObject *compute_value, return NULL; } ta->name = Py_NewRef(name); - ta->type_params = Py_IsNone(type_params) ? NULL : Py_XNewRef(type_params); + if ( + type_params == NULL + || Py_IsNone(type_params) + || (PyTuple_Check(type_params) && PyTuple_GET_SIZE(type_params) == 0) + ) { + ta->type_params = NULL; + } + else { + ta->type_params = Py_NewRef(type_params); + } ta->compute_value = Py_XNewRef(compute_value); ta->value = Py_XNewRef(value); ta->module = Py_XNewRef(module); diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index b956d0f4e44b9d..94e2bfc75637cb 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -282,13 +282,37 @@ hashtable_unicode_compare(const void *key1, const void *key2) } } +/* Return true if this interpreter should share the main interpreter's + intern_dict. That's important for interpreters which load basic + single-phase init extension modules (m_size == -1). There could be interned + immortal strings that are shared between interpreters, due to the + PyDict_Update(mdict, m_copy) call in import_find_extension(). + + It's not safe to deallocate those strings until all interpreters that + potentially use them are freed. By storing them in the main interpreter, we + ensure they get freed after all other interpreters are freed. +*/ +static bool +has_shared_intern_dict(PyInterpreterState *interp) +{ + PyInterpreterState *main_interp = _PyInterpreterState_Main(); + return interp != main_interp && interp->feature_flags & Py_RTFLAGS_USE_MAIN_OBMALLOC; +} + static int init_interned_dict(PyInterpreterState *interp) { assert(get_interned_dict(interp) == NULL); - PyObject *interned = interned = PyDict_New(); - if (interned == NULL) { - return -1; + PyObject *interned; + if (has_shared_intern_dict(interp)) { + interned = get_interned_dict(_PyInterpreterState_Main()); + Py_INCREF(interned); + } + else { + interned = PyDict_New(); + if (interned == NULL) { + return -1; + } } _Py_INTERP_CACHED_OBJECT(interp, interned_strings) = interned; return 0; @@ -299,7 +323,10 @@ clear_interned_dict(PyInterpreterState *interp) { PyObject *interned = get_interned_dict(interp); if (interned != NULL) { - PyDict_Clear(interned); + if (!has_shared_intern_dict(interp)) { + // only clear if the dict belongs to this interpreter + PyDict_Clear(interned); + } Py_DECREF(interned); _Py_INTERP_CACHED_OBJECT(interp, interned_strings) = NULL; } @@ -2694,11 +2721,6 @@ unicode_fromformat_write_wcstr(_PyUnicodeWriter *writer, const wchar_t *str, #define F_SIZE 3 #define F_PTRDIFF 4 #define F_INTMAX 5 -static const char * const formats[] = {"%d", "%ld", "%lld", "%zd", "%td", "%jd"}; -static const char * const formats_o[] = {"%o", "%lo", "%llo", "%zo", "%to", "%jo"}; -static const char * const formats_u[] = {"%u", "%lu", "%llu", "%zu", "%tu", "%ju"}; -static const char * const formats_x[] = {"%x", "%lx", "%llx", "%zx", "%tx", "%jx"}; -static const char * const formats_X[] = {"%X", "%lX", "%llX", "%zX", "%tX", "%jX"}; static const char* unicode_fromformat_arg(_PyUnicodeWriter *writer, @@ -2840,8 +2862,8 @@ unicode_fromformat_arg(_PyUnicodeWriter *writer, case 'd': case 'i': case 'o': case 'u': case 'x': case 'X': { - /* used by 
sprintf */ char buffer[MAX_INTMAX_CHARS]; - const char *fmt = NULL; - switch (*f) { - case 'o': fmt = formats_o[sizemod]; break; @@ -2857,37 +2879,49 @@ unicode_fromformat_arg(_PyUnicodeWriter *writer, - // to the format string not being a string literal. - _Py_COMP_DIAG_PUSH - _Py_COMP_DIAG_IGNORE_FORMAT_NONLITERAL + + // Fill buffer using sprintf, with one of many possible format + // strings, like "%llX" for `long long` in hexadecimal. + // The type/size is in `sizemod`; the format is in `*f`. + + // Use macros with nested switches to keep the sprintf format strings + // as compile-time literals, avoiding warnings and maybe allowing + // optimizations. + + // `SPRINT` macro does one sprintf + // Example usage: SPRINT("l", "X", unsigned long) expands to + // sprintf(buffer, "%" "l" "X", va_arg(*vargs, unsigned long)) + #define SPRINT(SIZE_SPEC, FMT_CHAR, TYPE) \ + sprintf(buffer, "%" SIZE_SPEC FMT_CHAR, va_arg(*vargs, TYPE)) + + // One inner switch to handle all format variants + #define DO_SPRINTS(SIZE_SPEC, SIGNED_TYPE, UNSIGNED_TYPE) \ + switch (*f) { \ + case 'o': len = SPRINT(SIZE_SPEC, "o", UNSIGNED_TYPE); break; \ + case 'u': len = SPRINT(SIZE_SPEC, "u", UNSIGNED_TYPE); break; \ + case 'x': len = SPRINT(SIZE_SPEC, "x", UNSIGNED_TYPE); break; \ + case 'X': len = SPRINT(SIZE_SPEC, "X", UNSIGNED_TYPE); break; \ + default: len = SPRINT(SIZE_SPEC, "d", SIGNED_TYPE); break; \ + } + + // Outer switch to handle all the sizes/types switch (sizemod) { - case F_LONG: - len = issigned ? - sprintf(buffer, fmt, va_arg(*vargs, long)) : - sprintf(buffer, fmt, va_arg(*vargs, unsigned long)); - break; - case F_LONGLONG: - len = issigned ? - sprintf(buffer, fmt, va_arg(*vargs, long long)) : - sprintf(buffer, fmt, va_arg(*vargs, unsigned long long)); - break; - case F_SIZE: - len = issigned ? - sprintf(buffer, fmt, va_arg(*vargs, Py_ssize_t)) : - sprintf(buffer, fmt, va_arg(*vargs, size_t)); - break; - case F_PTRDIFF: - len = sprintf(buffer, fmt, va_arg(*vargs, ptrdiff_t)); - break; - case F_INTMAX: - len = issigned ? - sprintf(buffer, fmt, va_arg(*vargs, intmax_t)) : - sprintf(buffer, fmt, va_arg(*vargs, uintmax_t)); - break; - default: - len = issigned ?
- sprintf(buffer, fmt, va_arg(*vargs, int)) : - sprintf(buffer, fmt, va_arg(*vargs, unsigned int)); - break; - } + case F_LONG: DO_SPRINTS("l", long, unsigned long); break; + case F_LONGLONG: DO_SPRINTS("ll", long long, unsigned long long); break; + case F_SIZE: DO_SPRINTS("z", Py_ssize_t, size_t); break; + case F_PTRDIFF: DO_SPRINTS("t", ptrdiff_t, ptrdiff_t); break; + case F_INTMAX: DO_SPRINTS("j", intmax_t, uintmax_t); break; + default: DO_SPRINTS("", int, unsigned int); break; + } - _Py_COMP_DIAG_POP + #undef SPRINT + #undef DO_SPRINTS + assert(len >= 0); int sign = (buffer[0] == '-'); @@ -15633,6 +15667,13 @@ _PyUnicode_ClearInterned(PyInterpreterState *interp) } assert(PyDict_CheckExact(interned)); + if (has_shared_intern_dict(interp)) { + // the dict doesn't belong to this interpreter, skip the debug + // checks on it and just clear the pointer to it + clear_interned_dict(interp); + return; + } + #ifdef INTERNED_STATS fprintf(stderr, "releasing %zd interned strings\n", PyDict_GET_SIZE(interned)); @@ -16141,8 +16182,10 @@ _PyUnicode_Fini(PyInterpreterState *interp) { struct _Py_unicode_state *state = &interp->unicode; - // _PyUnicode_ClearInterned() must be called before _PyUnicode_Fini() - assert(get_interned_dict(interp) == NULL); + if (!has_shared_intern_dict(interp)) { + // _PyUnicode_ClearInterned() must be called before _PyUnicode_Fini() + assert(get_interned_dict(interp) == NULL); + } _PyUnicode_FiniEncodings(&state->fs_codec); diff --git a/PC/pyconfig.h.in b/PC/pyconfig.h.in index 503f3193e2803e..010f5fe5646630 100644 --- a/PC/pyconfig.h.in +++ b/PC/pyconfig.h.in @@ -169,9 +169,9 @@ WIN32 is still required for the locale module. #endif /* MS_WIN64 */ /* set the version macros for the windows headers */ -/* Python 3.9+ requires Windows 8 or greater */ -#define Py_WINVER 0x0602 /* _WIN32_WINNT_WIN8 */ -#define Py_NTDDI NTDDI_WIN8 +/* Python 3.13+ requires Windows 10 or greater */ +#define Py_WINVER 0x0A00 /* _WIN32_WINNT_WIN10 */ +#define Py_NTDDI NTDDI_WIN10 /* We only set these values when building Python - we don't want to force these values on extensions, as that will affect the prototypes and diff --git a/PCbuild/_testlimitedcapi.vcxproj b/PCbuild/_testlimitedcapi.vcxproj index a1409ecf043d2d..846e027e10c7fa 100644 --- a/PCbuild/_testlimitedcapi.vcxproj +++ b/PCbuild/_testlimitedcapi.vcxproj @@ -97,6 +97,7 @@ + diff --git a/PCbuild/_testlimitedcapi.vcxproj.filters b/PCbuild/_testlimitedcapi.vcxproj.filters index e27e3171e1e6aa..57be2e2fc5b950 100644 --- a/PCbuild/_testlimitedcapi.vcxproj.filters +++ b/PCbuild/_testlimitedcapi.vcxproj.filters @@ -12,6 +12,7 @@ + diff --git a/PCbuild/get_externals.bat b/PCbuild/get_externals.bat index 137c94789e1809..dfacd1d1e788d4 100644 --- a/PCbuild/get_externals.bat +++ b/PCbuild/get_externals.bat @@ -56,8 +56,8 @@ if NOT "%IncludeLibffiSrc%"=="false" set libraries=%libraries% libffi-3.4.4 if NOT "%IncludeSSLSrc%"=="false" set libraries=%libraries% openssl-3.0.15 set libraries=%libraries% mpdecimal-4.0.0 set libraries=%libraries% sqlite-3.45.3.0 -if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tcl-core-8.6.14.0 -if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tk-8.6.14.0 +if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tcl-core-8.6.15.0 +if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tk-8.6.15.0 set libraries=%libraries% xz-5.2.5 set libraries=%libraries% zlib-1.3.1 @@ -78,7 +78,7 @@ echo.Fetching external binaries...
set binaries= if NOT "%IncludeLibffi%"=="false" set binaries=%binaries% libffi-3.4.4 if NOT "%IncludeSSL%"=="false" set binaries=%binaries% openssl-bin-3.0.15 -if NOT "%IncludeTkinter%"=="false" set binaries=%binaries% tcltk-8.6.14.0 +if NOT "%IncludeTkinter%"=="false" set binaries=%binaries% tcltk-8.6.15.0 if NOT "%IncludeSSLSrc%"=="false" set binaries=%binaries% nasm-2.11.06 for %%b in (%binaries%) do ( diff --git a/PCbuild/readme.txt b/PCbuild/readme.txt index 865e294d260a49..693fcee5f90ce2 100644 --- a/PCbuild/readme.txt +++ b/PCbuild/readme.txt @@ -195,7 +195,7 @@ _sqlite3 Homepage: https://www.sqlite.org/ _tkinter - Wraps version 8.6.14 of the Tk windowing system, which is downloaded + Wraps version 8.6.15 of the Tk windowing system, which is downloaded from our binaries repository at https://github.com/python/cpython-bin-deps. diff --git a/PCbuild/tcltk.props b/PCbuild/tcltk.props index 83c38c993d5754..b4cb401609d409 100644 --- a/PCbuild/tcltk.props +++ b/PCbuild/tcltk.props @@ -2,7 +2,7 @@ - 8.6.14.0 + 8.6.15.0 $(TclVersion) $([System.Version]::Parse($(TclVersion)).Major) $([System.Version]::Parse($(TclVersion)).Minor) diff --git a/Programs/_testembed.c b/Programs/_testembed.c index 10ee6b7be23e21..ab2b2d06cca15d 100644 --- a/Programs/_testembed.c +++ b/Programs/_testembed.c @@ -810,6 +810,7 @@ static void set_most_env_vars(void) #ifdef Py_STATS putenv("PYTHONSTATS=1"); #endif + putenv("PYTHONPERFSUPPORT=1"); } @@ -1844,6 +1845,10 @@ static int test_initconfig_api(void) goto error; } + if (PyInitConfig_SetInt(config, "perf_profiling", 2) < 0) { + goto error; + } + // Set a UTF-8 string (program_name) if (PyInitConfig_SetStr(config, "program_name", PROGRAM_NAME_UTF8) < 0) { goto error; diff --git a/Python/ast_opt.c b/Python/ast_opt.c index f5b04757e08bf3..01e208b88eca8b 100644 --- a/Python/ast_opt.c +++ b/Python/ast_opt.c @@ -169,11 +169,10 @@ safe_multiply(PyObject *v, PyObject *w) if (PyLong_Check(v) && PyLong_Check(w) && !_PyLong_IsZero((PyLongObject *)v) && !_PyLong_IsZero((PyLongObject *)w) ) { - uint64_t vbits = _PyLong_NumBits(v); - uint64_t wbits = _PyLong_NumBits(w); - if (vbits == (uint64_t)-1 || wbits == (uint64_t)-1) { - return NULL; - } + int64_t vbits = _PyLong_NumBits(v); + int64_t wbits = _PyLong_NumBits(w); + assert(vbits >= 0); + assert(wbits >= 0); if (vbits + wbits > MAX_INT_SIZE) { return NULL; } @@ -215,12 +214,13 @@ safe_power(PyObject *v, PyObject *w) if (PyLong_Check(v) && PyLong_Check(w) && !_PyLong_IsZero((PyLongObject *)v) && _PyLong_IsPositive((PyLongObject *)w) ) { - uint64_t vbits = _PyLong_NumBits(v); + int64_t vbits = _PyLong_NumBits(v); size_t wbits = PyLong_AsSize_t(w); - if (vbits == (uint64_t)-1 || wbits == (size_t)-1) { + assert(vbits >= 0); + if (wbits == (size_t)-1) { return NULL; } - if (vbits > MAX_INT_SIZE / wbits) { + if ((uint64_t)vbits > MAX_INT_SIZE / wbits) { return NULL; } } @@ -234,12 +234,13 @@ safe_lshift(PyObject *v, PyObject *w) if (PyLong_Check(v) && PyLong_Check(w) && !_PyLong_IsZero((PyLongObject *)v) && !_PyLong_IsZero((PyLongObject *)w) ) { - uint64_t vbits = _PyLong_NumBits(v); + int64_t vbits = _PyLong_NumBits(v); size_t wbits = PyLong_AsSize_t(w); - if (vbits == (uint64_t)-1 || wbits == (size_t)-1) { + assert(vbits >= 0); + if (wbits == (size_t)-1) { return NULL; } - if (wbits > MAX_INT_SIZE || vbits > MAX_INT_SIZE - wbits) { + if (wbits > MAX_INT_SIZE || (uint64_t)vbits > MAX_INT_SIZE - wbits) { return NULL; } } diff --git a/Python/bytecodes.c b/Python/bytecodes.c index 846404e28bb18f..8535306d9c7a03 100644 --- 
a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -808,14 +808,13 @@ dummy_func( assert(code->co_argcount == 2); DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize)); STAT_INC(BINARY_SUBSCR, hit); - Py_INCREF(getitem); } op(_BINARY_SUBSCR_INIT_CALL, (container, sub -- new_frame: _PyInterpreterFrame* )) { PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(container)); PyHeapTypeObject *ht = (PyHeapTypeObject *)tp; PyObject *getitem = ht->_spec_cache.getitem; - new_frame = _PyFrame_PushUnchecked(tstate, (PyFunctionObject *)getitem, 2, frame); + new_frame = _PyFrame_PushUnchecked(tstate, PyStackRef_FromPyObjectNew(getitem), 2, frame); SYNC_SP(); new_frame->localsplus[0] = container; new_frame->localsplus[1] = sub; @@ -1666,8 +1665,9 @@ dummy_func( inst(COPY_FREE_VARS, (--)) { /* Copy closure variables to free variables */ PyCodeObject *co = _PyFrame_GetCode(frame); - assert(PyFunction_Check(frame->f_funcobj)); - PyObject *closure = ((PyFunctionObject *)frame->f_funcobj)->func_closure; + assert(PyStackRef_FunctionCheck(frame->f_funcobj)); + PyFunctionObject *func = (PyFunctionObject *)PyStackRef_AsPyObjectBorrow(frame->f_funcobj); + PyObject *closure = func->func_closure; assert(oparg == co->co_nfreevars); int offset = co->co_nlocalsplus - oparg; for (int i = 0; i < oparg; ++i) { @@ -2170,8 +2170,7 @@ dummy_func( DEOPT_IF(code->co_argcount != 1); DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize)); STAT_INC(LOAD_ATTR, hit); - Py_INCREF(fget); - new_frame = _PyFrame_PushUnchecked(tstate, f, 1, frame); + new_frame = _PyFrame_PushUnchecked(tstate, PyStackRef_FromPyObjectNew(fget), 1, frame); new_frame->localsplus[0] = owner; } @@ -2202,8 +2201,8 @@ dummy_func( STAT_INC(LOAD_ATTR, hit); PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 1); - Py_INCREF(f); - _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, f, 2, frame); + _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked( + tstate, PyStackRef_FromPyObjectNew(f), 2, frame); // Manipulate stack directly because we exit with DISPATCH_INLINED(). STACK_SHRINK(1); new_frame->localsplus[0] = owner; @@ -2571,6 +2570,14 @@ dummy_func( JUMP_BACKWARD_NO_INTERRUPT, }; + pseudo(JUMP_IF_FALSE, (cond -- cond)) = [ + COPY, TO_BOOL, POP_JUMP_IF_FALSE, + ]; + + pseudo(JUMP_IF_TRUE, (cond -- cond)) = [ + COPY, TO_BOOL, POP_JUMP_IF_TRUE, + ]; + tier1 inst(ENTER_EXECUTOR, (--)) { #ifdef _Py_TIER2 PyCodeObject *code = _PyFrame_GetCode(frame); @@ -3251,7 +3258,7 @@ dummy_func( int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable_o))->co_flags; PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable_o)); _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit( - tstate, (PyFunctionObject *)PyStackRef_AsPyObjectSteal(callable), locals, + tstate, callable, locals, args, total_args, NULL, frame ); // Manipulate stack directly since we leave using DISPATCH_INLINED(). @@ -3340,7 +3347,7 @@ dummy_func( int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable_o))->co_flags; PyObject *locals = code_flags & CO_OPTIMIZED ? 
NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable_o)); new_frame = _PyEvalFramePushAndInit( - tstate, (PyFunctionObject *)PyStackRef_AsPyObjectSteal(callable), locals, + tstate, callable, locals, args, total_args, NULL, frame ); // The frame has stolen all the arguments from the stack, @@ -3475,11 +3482,9 @@ dummy_func( } replicate(5) pure op(_INIT_CALL_PY_EXACT_ARGS, (callable, self_or_null[1], args[oparg] -- new_frame: _PyInterpreterFrame*)) { - PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable); int has_self = !PyStackRef_IsNull(self_or_null[0]); STAT_INC(CALL, hit); - PyFunctionObject *func = (PyFunctionObject *)callable_o; - new_frame = _PyFrame_PushUnchecked(tstate, func, oparg + has_self, frame); + new_frame = _PyFrame_PushUnchecked(tstate, callable, oparg + has_self, frame); _PyStackRef *first_non_self_local = new_frame->localsplus + has_self; new_frame->localsplus[0] = self_or_null[0]; for (int i = 0; i < oparg; i++) { @@ -3601,10 +3606,9 @@ dummy_func( assert(_PyCode_CODE(_PyFrame_GetCode(shim))[0].op.code == EXIT_INIT_CHECK); /* Push self onto stack of shim */ shim->localsplus[0] = PyStackRef_DUP(self); - PyFunctionObject *init_func = (PyFunctionObject *)PyStackRef_AsPyObjectSteal(init); args[-1] = self; init_frame = _PyEvalFramePushAndInit( - tstate, init_func, NULL, args-1, oparg+1, NULL, shim); + tstate, init, NULL, args-1, oparg+1, NULL, shim); SYNC_SP(); if (init_frame == NULL) { _PyEval_FrameClearAndPop(tstate, shim); @@ -3924,7 +3928,7 @@ dummy_func( PyCFunctionFastWithKeywords cfunc = (PyCFunctionFastWithKeywords)(void(*)(void))meth->ml_meth; - STACKREFS_TO_PYOBJECTS(args, nargs, args_o); + STACKREFS_TO_PYOBJECTS(args, total_args, args_o); if (CONVERSION_FAILED(args_o)) { DECREF_INPUTS(); ERROR_IF(true, error); @@ -4005,7 +4009,7 @@ dummy_func( (PyCFunctionFast)(void(*)(void))meth->ml_meth; int nargs = total_args - 1; - STACKREFS_TO_PYOBJECTS(args, nargs, args_o); + STACKREFS_TO_PYOBJECTS(args, total_args, args_o); if (CONVERSION_FAILED(args_o)) { DECREF_INPUTS(); ERROR_IF(true, error); @@ -4080,7 +4084,7 @@ dummy_func( int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable_o))->co_flags; PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable_o)); _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit( - tstate, (PyFunctionObject *)PyStackRef_AsPyObjectSteal(callable), locals, + tstate, callable, locals, args, positional_args, kwnames_o, frame ); PyStackRef_CLOSE(kwnames); @@ -4148,7 +4152,7 @@ dummy_func( int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable_o))->co_flags; PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable_o)); new_frame = _PyEvalFramePushAndInit( - tstate, (PyFunctionObject *)PyStackRef_AsPyObjectSteal(callable), locals, + tstate, callable, locals, args, positional_args, kwnames_o, frame ); PyStackRef_CLOSE(kwnames); @@ -4332,9 +4336,9 @@ dummy_func( int code_flags = ((PyCodeObject *)PyFunction_GET_CODE(func))->co_flags; PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(func)); - _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit_Ex(tstate, - (PyFunctionObject *)PyStackRef_AsPyObjectSteal(func_st), locals, - nargs, callargs, kwargs, frame); + _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit_Ex( + tstate, func_st, locals, + nargs, callargs, kwargs, frame); // Need to manually shrink the stack since we exit with DISPATCH_INLINED. 
STACK_SHRINK(oparg + 3); if (new_frame == NULL) { @@ -4408,8 +4412,8 @@ dummy_func( } inst(RETURN_GENERATOR, (-- res)) { - assert(PyFunction_Check(frame->f_funcobj)); - PyFunctionObject *func = (PyFunctionObject *)frame->f_funcobj; + assert(PyStackRef_FunctionCheck(frame->f_funcobj)); + PyFunctionObject *func = (PyFunctionObject *)PyStackRef_AsPyObjectBorrow(frame->f_funcobj); PyGenObject *gen = (PyGenObject *)_Py_MakeCoro(func); if (gen == NULL) { ERROR_NO_POP(); @@ -4771,8 +4775,9 @@ dummy_func( } tier2 op(_CHECK_FUNCTION, (func_version/2 -- )) { - assert(PyFunction_Check(frame->f_funcobj)); - DEOPT_IF(((PyFunctionObject *)frame->f_funcobj)->func_version != func_version); + assert(PyStackRef_FunctionCheck(frame->f_funcobj)); + PyFunctionObject *func = (PyFunctionObject *)PyStackRef_AsPyObjectBorrow(frame->f_funcobj); + DEOPT_IF(func->func_version != func_version); } /* Internal -- for testing executors */ @@ -4831,6 +4836,14 @@ dummy_func( assert(((_PyExecutorObject *)executor)->vm_data.valid); } + tier2 op(_MAKE_WARM, (--)) { + current_executor->vm_data.warm = true; + // It's okay if this ends up going negative. + if (--tstate->interp->trace_run_counter == 0) { + _Py_set_eval_breaker_bit(tstate, _PY_EVAL_JIT_INVALIDATE_COLD_BIT); + } + } + tier2 op(_FATAL_ERROR, (--)) { assert(0); Py_FatalError("Fatal error uop executed."); diff --git a/Python/ceval.c b/Python/ceval.c index 44b39f5d36c93c..6e62939adb3745 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -196,7 +196,7 @@ lltrace_instruction(_PyInterpreterFrame *frame, static void lltrace_resume_frame(_PyInterpreterFrame *frame) { - PyObject *fobj = frame->f_funcobj; + PyObject *fobj = PyStackRef_AsPyObjectBorrow(frame->f_funcobj); if (!PyStackRef_CodeCheck(frame->f_executable) || fobj == NULL || !PyFunction_Check(fobj) @@ -277,7 +277,7 @@ static void monitor_throw(PyThreadState *tstate, static int check_args_iterable(PyThreadState *, PyObject *func, PyObject *vararg); static int get_exception_handler(PyCodeObject *, int, int*, int*, int*); static _PyInterpreterFrame * -_PyEvalFramePushAndInit_Ex(PyThreadState *tstate, PyFunctionObject *func, +_PyEvalFramePushAndInit_Ex(PyThreadState *tstate, _PyStackRef func, PyObject *locals, Py_ssize_t nargs, PyObject *callargs, PyObject *kwargs, _PyInterpreterFrame *previous); #ifdef HAVE_ERRNO_H @@ -781,7 +781,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int #ifdef Py_DEBUG /* Set these to invalid but identifiable values for debugging. 
*/ - entry_frame.f_funcobj = (PyObject*)0xaaa0; + entry_frame.f_funcobj = (_PyStackRef){.bits = 0xaaa0}; entry_frame.f_locals = (PyObject*)0xaaa1; entry_frame.frame_obj = (PyFrameObject*)0xaaa2; entry_frame.f_globals = (PyObject*)0xaaa3; @@ -1719,18 +1719,19 @@ _PyEval_FrameClearAndPop(PyThreadState *tstate, _PyInterpreterFrame * frame) /* Consumes references to func, locals and all the args */ _PyInterpreterFrame * -_PyEvalFramePushAndInit(PyThreadState *tstate, PyFunctionObject *func, +_PyEvalFramePushAndInit(PyThreadState *tstate, _PyStackRef func, PyObject *locals, _PyStackRef const* args, size_t argcount, PyObject *kwnames, _PyInterpreterFrame *previous) { - PyCodeObject * code = (PyCodeObject *)func->func_code; + PyFunctionObject *func_obj = (PyFunctionObject *)PyStackRef_AsPyObjectBorrow(func); + PyCodeObject * code = (PyCodeObject *)func_obj->func_code; CALL_STAT_INC(frames_pushed); _PyInterpreterFrame *frame = _PyThreadState_PushFrame(tstate, code->co_framesize); if (frame == NULL) { goto fail; } _PyFrame_Initialize(frame, func, locals, code, 0, previous); - if (initialize_locals(tstate, func, frame->localsplus, args, argcount, kwnames)) { + if (initialize_locals(tstate, func_obj, frame->localsplus, args, argcount, kwnames)) { assert(frame->owner == FRAME_OWNED_BY_THREAD); clear_thread_frame(tstate, frame); return NULL; @@ -1738,7 +1739,7 @@ _PyEvalFramePushAndInit(PyThreadState *tstate, PyFunctionObject *func, return frame; fail: /* Consume the references */ - Py_DECREF(func); + PyStackRef_CLOSE(func); Py_XDECREF(locals); for (size_t i = 0; i < argcount; i++) { PyStackRef_CLOSE(args[i]); @@ -1754,7 +1755,7 @@ _PyEvalFramePushAndInit(PyThreadState *tstate, PyFunctionObject *func, } static _PyInterpreterFrame * -_PyEvalFramePushAndInit_UnTagged(PyThreadState *tstate, PyFunctionObject *func, +_PyEvalFramePushAndInit_UnTagged(PyThreadState *tstate, _PyStackRef func, PyObject *locals, PyObject *const* args, size_t argcount, PyObject *kwnames, _PyInterpreterFrame *previous) { @@ -1784,7 +1785,7 @@ _PyEvalFramePushAndInit_UnTagged(PyThreadState *tstate, PyFunctionObject *func, Steals references to func, callargs and kwargs. 
*/ static _PyInterpreterFrame * -_PyEvalFramePushAndInit_Ex(PyThreadState *tstate, PyFunctionObject *func, +_PyEvalFramePushAndInit_Ex(PyThreadState *tstate, _PyStackRef func, PyObject *locals, Py_ssize_t nargs, PyObject *callargs, PyObject *kwargs, _PyInterpreterFrame *previous) { bool has_dict = (kwargs != NULL && PyDict_GET_SIZE(kwargs) > 0); @@ -1793,7 +1794,7 @@ _PyEvalFramePushAndInit_Ex(PyThreadState *tstate, PyFunctionObject *func, if (has_dict) { newargs = _PyStack_UnpackDict(tstate, _PyTuple_ITEMS(callargs), nargs, kwargs, &kwnames); if (newargs == NULL) { - Py_DECREF(func); + PyStackRef_CLOSE(func); goto error; } } @@ -1805,7 +1806,7 @@ _PyEvalFramePushAndInit_Ex(PyThreadState *tstate, PyFunctionObject *func, } } _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit_UnTagged( - tstate, (PyFunctionObject *)func, locals, + tstate, func, locals, newargs, nargs, kwnames, previous ); if (has_dict) { @@ -1831,7 +1832,6 @@ _PyEval_Vector(PyThreadState *tstate, PyFunctionObject *func, { /* _PyEvalFramePushAndInit consumes the references * to func, locals and all its arguments */ - Py_INCREF(func); Py_XINCREF(locals); for (size_t i = 0; i < argcount; i++) { Py_INCREF(args[i]); @@ -1843,7 +1843,8 @@ _PyEval_Vector(PyThreadState *tstate, PyFunctionObject *func, } } _PyInterpreterFrame *frame = _PyEvalFramePushAndInit_UnTagged( - tstate, func, locals, args, argcount, kwnames, NULL); + tstate, PyStackRef_FromPyObjectNew(func), locals, + args, argcount, kwnames, NULL); if (frame == NULL) { return NULL; } diff --git a/Python/ceval_gil.c b/Python/ceval_gil.c index 6f4476d055b5ec..1d9381d09dfb62 100644 --- a/Python/ceval_gil.c +++ b/Python/ceval_gil.c @@ -1289,6 +1289,12 @@ _Py_HandlePending(PyThreadState *tstate) _Py_RunGC(tstate); } + if ((breaker & _PY_EVAL_JIT_INVALIDATE_COLD_BIT) != 0) { + _Py_unset_eval_breaker_bit(tstate, _PY_EVAL_JIT_INVALIDATE_COLD_BIT); + _Py_Executors_InvalidateCold(tstate->interp); + tstate->interp->trace_run_counter = JIT_CLEANUP_THRESHOLD; + } + /* GIL drop request */ if ((breaker & _PY_GIL_DROP_REQUEST_BIT) != 0) { /* Give another thread a chance */ diff --git a/Python/codecs.c b/Python/codecs.c index 9c0a3fad314cb5..68dc232bb86163 100644 --- a/Python/codecs.c +++ b/Python/codecs.c @@ -16,6 +16,12 @@ Copyright (c) Corporation for National Research Initiatives. #include "pycore_pystate.h" // _PyInterpreterState_GET() #include "pycore_ucnhash.h" // _PyUnicode_Name_CAPI +static const char *codecs_builtin_error_handlers[] = { + "strict", "ignore", "replace", + "xmlcharrefreplace", "backslashreplace", "namereplace", + "surrogatepass", "surrogateescape", +}; + const char *Py_hexdigits = "0123456789abcdef"; /* --- Codec Registry ----------------------------------------------------- */ @@ -618,6 +624,20 @@ int PyCodec_RegisterError(const char *name, PyObject *error) name, error); } +int _PyCodec_UnregisterError(const char *name) +{ + for (size_t i = 0; i < Py_ARRAY_LENGTH(codecs_builtin_error_handlers); ++i) { + if (strcmp(name, codecs_builtin_error_handlers[i]) == 0) { + PyErr_Format(PyExc_ValueError, + "cannot un-register built-in error handler '%s'", name); + return -1; + } + } + PyInterpreterState *interp = _PyInterpreterState_GET(); + assert(interp->codecs.initialized); + return PyDict_PopString(interp->codecs.error_registry, name, NULL); +} + /* Lookup the error handling callback function registered under the name error. As a special case NULL can be passed, in which case the error handling callback for strict encoding will be returned. 
*/ @@ -1470,6 +1490,8 @@ _PyCodec_InitRegistry(PyInterpreterState *interp) } } }; + // ensure that the built-in error handlers' names are kept in sync + assert(Py_ARRAY_LENGTH(methods) == Py_ARRAY_LENGTH(codecs_builtin_error_handlers)); assert(interp->codecs.initialized == 0); interp->codecs.search_path = PyList_New(0); diff --git a/Python/codegen.c b/Python/codegen.c index 0305f4299aec56..896c30cc14952a 100644 --- a/Python/codegen.c +++ b/Python/codegen.c @@ -3140,17 +3140,15 @@ codegen_boolop(compiler *c, expr_ty e) location loc = LOC(e); assert(e->kind == BoolOp_kind); if (e->v.BoolOp.op == And) - jumpi = POP_JUMP_IF_FALSE; + jumpi = JUMP_IF_FALSE; else - jumpi = POP_JUMP_IF_TRUE; + jumpi = JUMP_IF_TRUE; NEW_JUMP_TARGET_LABEL(c, end); s = e->v.BoolOp.values; n = asdl_seq_LEN(s) - 1; assert(n >= 0); for (i = 0; i < n; ++i) { VISIT(c, expr, (expr_ty)asdl_seq_GET(s, i)); - ADDOP_I(c, loc, COPY, 1); - ADDOP(c, loc, TO_BOOL); ADDOP_JUMP(c, loc, jumpi, end); ADDOP(c, loc, POP_TOP); } diff --git a/Python/compile.c b/Python/compile.c index 7b3e6f336e44b1..9826d3fbbde976 100644 --- a/Python/compile.c +++ b/Python/compile.c @@ -911,7 +911,17 @@ PyObject * _PyCompile_StaticAttributesAsTuple(compiler *c) { assert(c->u->u_static_attributes); - return PySequence_Tuple(c->u->u_static_attributes); + PyObject *static_attributes_unsorted = PySequence_List(c->u->u_static_attributes); + if (static_attributes_unsorted == NULL) { + return NULL; + } + if (PyList_Sort(static_attributes_unsorted) != 0) { + Py_DECREF(static_attributes_unsorted); + return NULL; + } + PyObject *static_attributes = PySequence_Tuple(static_attributes_unsorted); + Py_DECREF(static_attributes_unsorted); + return static_attributes; } int diff --git a/Python/context.c b/Python/context.c index e52efbb6516d5c..ddb03555f9e402 100644 --- a/Python/context.c +++ b/Python/context.c @@ -112,10 +112,10 @@ context_event_name(PyContextEvent event) { Py_UNREACHABLE(); } -static void notify_context_watchers(PyContextEvent event, PyContext *ctx) +static void notify_context_watchers(PyContextEvent event, PyContext *ctx, PyThreadState *ts) { assert(Py_REFCNT(ctx) > 0); - PyInterpreterState *interp = _PyInterpreterState_GET(); + PyInterpreterState *interp = ts->interp; assert(interp->_initialized); uint8_t bits = interp->active_context_watchers; int i = 0; @@ -192,7 +192,7 @@ _PyContext_Enter(PyThreadState *ts, PyObject *octx) ts->context = Py_NewRef(ctx); ts->context_ver++; - notify_context_watchers(Py_CONTEXT_EVENT_ENTER, ctx); + notify_context_watchers(Py_CONTEXT_EVENT_ENTER, ctx, ts); return 0; } @@ -226,7 +226,7 @@ _PyContext_Exit(PyThreadState *ts, PyObject *octx) return -1; } - notify_context_watchers(Py_CONTEXT_EVENT_EXIT, ctx); + notify_context_watchers(Py_CONTEXT_EVENT_EXIT, ctx, ts); Py_SETREF(ts->context, (PyObject *)ctx->ctx_prev); ts->context_ver++; diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h index 93ab068f9de949..650bf4533a3a86 100644 --- a/Python/executor_cases.c.h +++ b/Python/executor_cases.c.h @@ -1018,7 +1018,6 @@ JUMP_TO_JUMP_TARGET(); } STAT_INC(BINARY_SUBSCR, hit); - Py_INCREF(getitem); break; } @@ -1031,7 +1030,7 @@ PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(container)); PyHeapTypeObject *ht = (PyHeapTypeObject *)tp; PyObject *getitem = ht->_spec_cache.getitem; - new_frame = _PyFrame_PushUnchecked(tstate, (PyFunctionObject *)getitem, 2, frame); + new_frame = _PyFrame_PushUnchecked(tstate, PyStackRef_FromPyObjectNew(getitem), 2, frame); stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); 
new_frame->localsplus[0] = container; @@ -1852,8 +1851,9 @@ oparg = CURRENT_OPARG(); /* Copy closure variables to free variables */ PyCodeObject *co = _PyFrame_GetCode(frame); - assert(PyFunction_Check(frame->f_funcobj)); - PyObject *closure = ((PyFunctionObject *)frame->f_funcobj)->func_closure; + assert(PyStackRef_FunctionCheck(frame->f_funcobj)); + PyFunctionObject *func = (PyFunctionObject *)PyStackRef_AsPyObjectBorrow(frame->f_funcobj); + PyObject *closure = func->func_closure; assert(oparg == co->co_nfreevars); int offset = co->co_nlocalsplus - oparg; for (int i = 0; i < oparg; ++i) { @@ -2553,8 +2553,7 @@ JUMP_TO_JUMP_TARGET(); } STAT_INC(LOAD_ATTR, hit); - Py_INCREF(fget); - new_frame = _PyFrame_PushUnchecked(tstate, f, 1, frame); + new_frame = _PyFrame_PushUnchecked(tstate, PyStackRef_FromPyObjectNew(fget), 1, frame); new_frame->localsplus[0] = owner; stack_pointer[-1].bits = (uintptr_t)new_frame; break; @@ -3603,7 +3602,7 @@ int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable_o))->co_flags; PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable_o)); new_frame = _PyEvalFramePushAndInit( - tstate, (PyFunctionObject *)PyStackRef_AsPyObjectSteal(callable), locals, + tstate, callable, locals, args, total_args, NULL, frame ); // The frame has stolen all the arguments from the stack, @@ -3833,11 +3832,9 @@ args = &stack_pointer[-oparg]; self_or_null = &stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; - PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable); int has_self = !PyStackRef_IsNull(self_or_null[0]); STAT_INC(CALL, hit); - PyFunctionObject *func = (PyFunctionObject *)callable_o; - new_frame = _PyFrame_PushUnchecked(tstate, func, oparg + has_self, frame); + new_frame = _PyFrame_PushUnchecked(tstate, callable, oparg + has_self, frame); _PyStackRef *first_non_self_local = new_frame->localsplus + has_self; new_frame->localsplus[0] = self_or_null[0]; for (int i = 0; i < oparg; i++) { @@ -3859,11 +3856,9 @@ args = &stack_pointer[-oparg]; self_or_null = &stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; - PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable); int has_self = !PyStackRef_IsNull(self_or_null[0]); STAT_INC(CALL, hit); - PyFunctionObject *func = (PyFunctionObject *)callable_o; - new_frame = _PyFrame_PushUnchecked(tstate, func, oparg + has_self, frame); + new_frame = _PyFrame_PushUnchecked(tstate, callable, oparg + has_self, frame); _PyStackRef *first_non_self_local = new_frame->localsplus + has_self; new_frame->localsplus[0] = self_or_null[0]; for (int i = 0; i < oparg; i++) { @@ -3885,11 +3880,9 @@ args = &stack_pointer[-oparg]; self_or_null = &stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; - PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable); int has_self = !PyStackRef_IsNull(self_or_null[0]); STAT_INC(CALL, hit); - PyFunctionObject *func = (PyFunctionObject *)callable_o; - new_frame = _PyFrame_PushUnchecked(tstate, func, oparg + has_self, frame); + new_frame = _PyFrame_PushUnchecked(tstate, callable, oparg + has_self, frame); _PyStackRef *first_non_self_local = new_frame->localsplus + has_self; new_frame->localsplus[0] = self_or_null[0]; for (int i = 0; i < oparg; i++) { @@ -3911,11 +3904,9 @@ args = &stack_pointer[-oparg]; self_or_null = &stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; - PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable); int has_self = !PyStackRef_IsNull(self_or_null[0]); STAT_INC(CALL, hit); - 
PyFunctionObject *func = (PyFunctionObject *)callable_o; - new_frame = _PyFrame_PushUnchecked(tstate, func, oparg + has_self, frame); + new_frame = _PyFrame_PushUnchecked(tstate, callable, oparg + has_self, frame); _PyStackRef *first_non_self_local = new_frame->localsplus + has_self; new_frame->localsplus[0] = self_or_null[0]; for (int i = 0; i < oparg; i++) { @@ -3937,11 +3928,9 @@ args = &stack_pointer[-oparg]; self_or_null = &stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; - PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable); int has_self = !PyStackRef_IsNull(self_or_null[0]); STAT_INC(CALL, hit); - PyFunctionObject *func = (PyFunctionObject *)callable_o; - new_frame = _PyFrame_PushUnchecked(tstate, func, oparg + has_self, frame); + new_frame = _PyFrame_PushUnchecked(tstate, callable, oparg + has_self, frame); _PyStackRef *first_non_self_local = new_frame->localsplus + has_self; new_frame->localsplus[0] = self_or_null[0]; for (int i = 0; i < oparg; i++) { @@ -3962,11 +3951,9 @@ args = &stack_pointer[-oparg]; self_or_null = &stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; - PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable); int has_self = !PyStackRef_IsNull(self_or_null[0]); STAT_INC(CALL, hit); - PyFunctionObject *func = (PyFunctionObject *)callable_o; - new_frame = _PyFrame_PushUnchecked(tstate, func, oparg + has_self, frame); + new_frame = _PyFrame_PushUnchecked(tstate, callable, oparg + has_self, frame); _PyStackRef *first_non_self_local = new_frame->localsplus + has_self; new_frame->localsplus[0] = self_or_null[0]; for (int i = 0; i < oparg; i++) { @@ -4145,10 +4132,9 @@ assert(_PyCode_CODE(_PyFrame_GetCode(shim))[0].op.code == EXIT_INIT_CHECK); /* Push self onto stack of shim */ shim->localsplus[0] = PyStackRef_DUP(self); - PyFunctionObject *init_func = (PyFunctionObject *)PyStackRef_AsPyObjectSteal(init); args[-1] = self; init_frame = _PyEvalFramePushAndInit( - tstate, init_func, NULL, args-1, oparg+1, NULL, shim); + tstate, init, NULL, args-1, oparg+1, NULL, shim); stack_pointer += -2 - oparg; assert(WITHIN_STACK_BOUNDS()); if (init_frame == NULL) { @@ -4610,7 +4596,7 @@ int nargs = total_args - 1; PyCFunctionFastWithKeywords cfunc = (PyCFunctionFastWithKeywords)(void(*)(void))meth->ml_meth; - STACKREFS_TO_PYOBJECTS(args, nargs, args_o); + STACKREFS_TO_PYOBJECTS(args, total_args, args_o); if (CONVERSION_FAILED(args_o)) { PyStackRef_CLOSE(callable); PyStackRef_CLOSE(self_or_null[0]); @@ -4727,7 +4713,7 @@ PyCFunctionFast cfunc = (PyCFunctionFast)(void(*)(void))meth->ml_meth; int nargs = total_args - 1; - STACKREFS_TO_PYOBJECTS(args, nargs, args_o); + STACKREFS_TO_PYOBJECTS(args, total_args, args_o); if (CONVERSION_FAILED(args_o)) { PyStackRef_CLOSE(callable); PyStackRef_CLOSE(self_or_null[0]); @@ -4780,7 +4766,7 @@ int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable_o))->co_flags; PyObject *locals = code_flags & CO_OPTIMIZED ? 
NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable_o)); new_frame = _PyEvalFramePushAndInit( - tstate, (PyFunctionObject *)PyStackRef_AsPyObjectSteal(callable), locals, + tstate, callable, locals, args, positional_args, kwnames_o, frame ); PyStackRef_CLOSE(kwnames); @@ -5001,8 +4987,8 @@ case _RETURN_GENERATOR: { _PyStackRef res; - assert(PyFunction_Check(frame->f_funcobj)); - PyFunctionObject *func = (PyFunctionObject *)frame->f_funcobj; + assert(PyStackRef_FunctionCheck(frame->f_funcobj)); + PyFunctionObject *func = (PyFunctionObject *)PyStackRef_AsPyObjectBorrow(frame->f_funcobj); PyGenObject *gen = (PyGenObject *)_Py_MakeCoro(func); if (gen == NULL) { JUMP_TO_ERROR(); @@ -5376,8 +5362,9 @@ case _CHECK_FUNCTION: { uint32_t func_version = (uint32_t)CURRENT_OPERAND(); - assert(PyFunction_Check(frame->f_funcobj)); - if (((PyFunctionObject *)frame->f_funcobj)->func_version != func_version) { + assert(PyStackRef_FunctionCheck(frame->f_funcobj)); + PyFunctionObject *func = (PyFunctionObject *)PyStackRef_AsPyObjectBorrow(frame->f_funcobj); + if (func->func_version != func_version) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } @@ -5448,6 +5435,15 @@ break; } + case _MAKE_WARM: { + current_executor->vm_data.warm = true; + // It's okay if this ends up going negative. + if (--tstate->interp->trace_run_counter == 0) { + _Py_set_eval_breaker_bit(tstate, _PY_EVAL_JIT_INVALIDATE_COLD_BIT); + } + break; + } + case _FATAL_ERROR: { assert(0); Py_FatalError("Fatal error uop executed."); diff --git a/Python/flowgraph.c b/Python/flowgraph.c index f7d8efb28e21c4..69d7e0a872aa48 100644 --- a/Python/flowgraph.c +++ b/Python/flowgraph.c @@ -1589,6 +1589,8 @@ basicblock_optimize_load_const(PyObject *const_cache, basicblock *bb, PyObject * switch(nextop) { case POP_JUMP_IF_FALSE: case POP_JUMP_IF_TRUE: + case JUMP_IF_FALSE: + case JUMP_IF_TRUE: { /* Remove LOAD_CONST const; conditional jump */ PyObject* cnt = get_const_value(opcode, oparg, consts); @@ -1600,8 +1602,11 @@ basicblock_optimize_load_const(PyObject *const_cache, basicblock *bb, PyObject * if (is_true == -1) { return ERROR; } - INSTR_SET_OP0(inst, NOP); - int jump_if_true = nextop == POP_JUMP_IF_TRUE; + if (PyCompile_OpcodeStackEffect(nextop, 0) == -1) { + /* POP_JUMP_IF_FALSE or POP_JUMP_IF_TRUE */ + INSTR_SET_OP0(inst, NOP); + } + int jump_if_true = (nextop == POP_JUMP_IF_TRUE || nextop == JUMP_IF_TRUE); if (is_true == jump_if_true) { bb->b_instr[i+1].i_opcode = JUMP; } @@ -1761,6 +1766,36 @@ optimize_basic_block(PyObject *const_cache, basicblock *bb, PyObject *consts) i -= jump_thread(bb, inst, target, POP_JUMP_IF_TRUE); } break; + case JUMP_IF_FALSE: + switch (target->i_opcode) { + case JUMP: + case JUMP_IF_FALSE: + i -= jump_thread(bb, inst, target, JUMP_IF_FALSE); + continue; + case JUMP_IF_TRUE: + // No need to check for loops here, a block's b_next + // cannot point to itself. + assert(inst->i_target != inst->i_target->b_next); + inst->i_target = inst->i_target->b_next; + i--; + continue; + } + break; + case JUMP_IF_TRUE: + switch (target->i_opcode) { + case JUMP: + case JUMP_IF_TRUE: + i -= jump_thread(bb, inst, target, JUMP_IF_TRUE); + continue; + case JUMP_IF_FALSE: + // No need to check for loops here, a block's b_next + // cannot point to itself. 
+ assert(inst->i_target != inst->i_target->b_next); + inst->i_target = inst->i_target->b_next; + i--; + continue; + } + break; case JUMP: case JUMP_NO_INTERRUPT: switch (target->i_opcode) { @@ -2367,6 +2402,38 @@ push_cold_blocks_to_end(cfg_builder *g) { return SUCCESS; } +static int +convert_pseudo_conditional_jumps(cfg_builder *g) +{ + basicblock *entryblock = g->g_entryblock; + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + for (int i = 0; i < b->b_iused; i++) { + cfg_instr *instr = &b->b_instr[i]; + if (instr->i_opcode == JUMP_IF_FALSE || instr->i_opcode == JUMP_IF_TRUE) { + assert(i == b->b_iused - 1); + instr->i_opcode = instr->i_opcode == JUMP_IF_FALSE ? + POP_JUMP_IF_FALSE : POP_JUMP_IF_TRUE; + location loc = instr->i_loc; + cfg_instr copy = { + .i_opcode = COPY, + .i_oparg = 1, + .i_loc = loc, + .i_target = NULL, + }; + RETURN_IF_ERROR(basicblock_insert_instruction(b, i++, &copy)); + cfg_instr to_bool = { + .i_opcode = TO_BOOL, + .i_oparg = 0, + .i_loc = loc, + .i_target = NULL, + }; + RETURN_IF_ERROR(basicblock_insert_instruction(b, i++, &to_bool)); + } + } + } + return SUCCESS; +} + static int convert_pseudo_ops(cfg_builder *g) { @@ -2826,6 +2893,8 @@ _PyCfg_OptimizedCfgToInstructionSequence(cfg_builder *g, int *stackdepth, int *nlocalsplus, _PyInstructionSequence *seq) { + RETURN_IF_ERROR(convert_pseudo_conditional_jumps(g)); + *stackdepth = calculate_stackdepth(g); if (*stackdepth < 0) { return ERROR; diff --git a/Python/frame.c b/Python/frame.c index d7bb29811bfa50..35e6c2d0a93333 100644 --- a/Python/frame.c +++ b/Python/frame.c @@ -13,11 +13,8 @@ _PyFrame_Traverse(_PyInterpreterFrame *frame, visitproc visit, void *arg) { Py_VISIT(frame->frame_obj); Py_VISIT(frame->f_locals); - Py_VISIT(frame->f_funcobj); - int err = _PyGC_VisitStackRef(&frame->f_executable, visit, arg); - if (err) { - return err; - } + _Py_VISIT_STACKREF(frame->f_funcobj); + _Py_VISIT_STACKREF(frame->f_executable); return _PyGC_VisitFrameStack(frame, visit, arg); } @@ -126,7 +123,7 @@ _PyFrame_ClearExceptCode(_PyInterpreterFrame *frame) Py_DECREF(f); } _PyFrame_ClearLocals(frame); - Py_DECREF(frame->f_funcobj); + PyStackRef_CLEAR(frame->f_funcobj); } /* Unstable API functions */ diff --git a/Python/gc.c b/Python/gc.c index 024d041437be4a..028657eb8999c1 100644 --- a/Python/gc.c +++ b/Python/gc.c @@ -1944,6 +1944,13 @@ _PyGC_DumpShutdownStats(PyInterpreterState *interp) } } +static void +finalize_unlink_gc_head(PyGC_Head *gc) { + PyGC_Head *prev = GC_PREV(gc); + PyGC_Head *next = GC_NEXT(gc); + _PyGCHead_SET_NEXT(prev, next); + _PyGCHead_SET_PREV(next, prev); +} void _PyGC_Fini(PyInterpreterState *interp) @@ -1952,9 +1959,25 @@ _PyGC_Fini(PyInterpreterState *interp) Py_CLEAR(gcstate->garbage); Py_CLEAR(gcstate->callbacks); - /* We expect that none of this interpreters objects are shared - with other interpreters. - See https://github.com/python/cpython/issues/90228. */ + /* Prevent a subtle bug that affects sub-interpreters that use basic + * single-phase init extensions (m_size == -1). Those extensions cause objects + * to be shared between interpreters, via the PyDict_Update(mdict, m_copy) call + * in import_find_extension(). + * + * If they are GC objects, their GC head next or prev links could refer to + * the interpreter _gc_runtime_state PyGC_Head nodes. Those nodes go away + * when the interpreter structure is freed and so pointers to them become + * invalid. If those objects are still used by another interpreter and + * UNTRACK is called on them, a crash will happen. 
We untrack the nodes + * here to avoid that. + * + * This bug was originally fixed when reported as gh-90228. The bug was + * re-introduced in gh-94673. + */ + finalize_unlink_gc_head(&gcstate->young.head); + finalize_unlink_gc_head(&gcstate->old[0].head); + finalize_unlink_gc_head(&gcstate->old[1].head); + finalize_unlink_gc_head(&gcstate->permanent_generation.head); } /* for debugging */ diff --git a/Python/gc_free_threading.c b/Python/gc_free_threading.c index c645f1b9a63806..a5bc9b9b5782b2 100644 --- a/Python/gc_free_threading.c +++ b/Python/gc_free_threading.c @@ -200,6 +200,7 @@ frame_disable_deferred_refcounting(_PyInterpreterFrame *frame) } } + frame->f_funcobj = PyStackRef_AsStrongReference(frame->f_funcobj); for (_PyStackRef *ref = frame->localsplus; ref < frame->stackpointer; ref++) { if (!PyStackRef_IsNull(*ref) && PyStackRef_IsDeferred(*ref)) { *ref = PyStackRef_AsStrongReference(*ref); @@ -994,9 +995,7 @@ _PyGC_VisitFrameStack(_PyInterpreterFrame *frame, visitproc visit, void *arg) _PyStackRef *ref = _PyFrame_GetLocalsArray(frame); /* locals and stack */ for (; ref < frame->stackpointer; ref++) { - if (_PyGC_VisitStackRef(ref, visit, arg) < 0) { - return -1; - } + _Py_VISIT_STACKREF(*ref); } return 0; } diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index 6d902e2c1d9ba8..1201fe82efb919 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -506,7 +506,6 @@ assert(code->co_argcount == 2); DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), BINARY_SUBSCR); STAT_INC(BINARY_SUBSCR, hit); - Py_INCREF(getitem); } // _BINARY_SUBSCR_INIT_CALL sub = stack_pointer[-1]; @@ -514,7 +513,7 @@ PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(container)); PyHeapTypeObject *ht = (PyHeapTypeObject *)tp; PyObject *getitem = ht->_spec_cache.getitem; - new_frame = _PyFrame_PushUnchecked(tstate, (PyFunctionObject *)getitem, 2, frame); + new_frame = _PyFrame_PushUnchecked(tstate, PyStackRef_FromPyObjectNew(getitem), 2, frame); stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); new_frame->localsplus[0] = container; @@ -892,7 +891,7 @@ int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable_o))->co_flags; PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable_o)); _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit( - tstate, (PyFunctionObject *)PyStackRef_AsPyObjectSteal(callable), locals, + tstate, callable, locals, args, total_args, NULL, frame ); // Manipulate stack directly since we leave using DISPATCH_INLINED(). 
@@ -1021,10 +1020,9 @@ assert(_PyCode_CODE(_PyFrame_GetCode(shim))[0].op.code == EXIT_INIT_CHECK); /* Push self onto stack of shim */ shim->localsplus[0] = PyStackRef_DUP(self); - PyFunctionObject *init_func = (PyFunctionObject *)PyStackRef_AsPyObjectSteal(init); args[-1] = self; init_frame = _PyEvalFramePushAndInit( - tstate, init_func, NULL, args-1, oparg+1, NULL, shim); + tstate, init, NULL, args-1, oparg+1, NULL, shim); stack_pointer += -2 - oparg; assert(WITHIN_STACK_BOUNDS()); if (init_frame == NULL) { @@ -1119,11 +1117,9 @@ // _INIT_CALL_PY_EXACT_ARGS args = &stack_pointer[-oparg]; { - PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable); int has_self = !PyStackRef_IsNull(self_or_null[0]); STAT_INC(CALL, hit); - PyFunctionObject *func = (PyFunctionObject *)callable_o; - new_frame = _PyFrame_PushUnchecked(tstate, func, oparg + has_self, frame); + new_frame = _PyFrame_PushUnchecked(tstate, callable, oparg + has_self, frame); _PyStackRef *first_non_self_local = new_frame->localsplus + has_self; new_frame->localsplus[0] = self_or_null[0]; for (int i = 0; i < oparg; i++) { @@ -1216,7 +1212,7 @@ int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable_o))->co_flags; PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable_o)); new_frame = _PyEvalFramePushAndInit( - tstate, (PyFunctionObject *)PyStackRef_AsPyObjectSteal(callable), locals, + tstate, callable, locals, args, total_args, NULL, frame ); // The frame has stolen all the arguments from the stack, @@ -1616,8 +1612,8 @@ Py_ssize_t nargs = PyTuple_GET_SIZE(callargs); int code_flags = ((PyCodeObject *)PyFunction_GET_CODE(func))->co_flags; PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(func)); - _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit_Ex(tstate, - (PyFunctionObject *)PyStackRef_AsPyObjectSteal(func_st), locals, + _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit_Ex( + tstate, func_st, locals, nargs, callargs, kwargs, frame); // Need to manually shrink the stack since we exit with DISPATCH_INLINED. STACK_SHRINK(oparg + 3); @@ -1802,7 +1798,7 @@ int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable_o))->co_flags; PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable_o)); _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit( - tstate, (PyFunctionObject *)PyStackRef_AsPyObjectSteal(callable), locals, + tstate, callable, locals, args, positional_args, kwnames_o, frame ); PyStackRef_CLOSE(kwnames); @@ -1936,7 +1932,7 @@ int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable_o))->co_flags; PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable_o)); new_frame = _PyEvalFramePushAndInit( - tstate, (PyFunctionObject *)PyStackRef_AsPyObjectSteal(callable), locals, + tstate, callable, locals, args, positional_args, kwnames_o, frame ); PyStackRef_CLOSE(kwnames); @@ -2104,7 +2100,7 @@ int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable_o))->co_flags; PyObject *locals = code_flags & CO_OPTIMIZED ? 
NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable_o)); new_frame = _PyEvalFramePushAndInit( - tstate, (PyFunctionObject *)PyStackRef_AsPyObjectSteal(callable), locals, + tstate, callable, locals, args, positional_args, kwnames_o, frame ); PyStackRef_CLOSE(kwnames); @@ -2256,7 +2252,7 @@ PyCFunctionFast cfunc = (PyCFunctionFast)(void(*)(void))meth->ml_meth; int nargs = total_args - 1; - STACKREFS_TO_PYOBJECTS(args, nargs, args_o); + STACKREFS_TO_PYOBJECTS(args, total_args, args_o); if (CONVERSION_FAILED(args_o)) { PyStackRef_CLOSE(callable); PyStackRef_CLOSE(self_or_null[0]); @@ -2337,7 +2333,7 @@ int nargs = total_args - 1; PyCFunctionFastWithKeywords cfunc = (PyCFunctionFastWithKeywords)(void(*)(void))meth->ml_meth; - STACKREFS_TO_PYOBJECTS(args, nargs, args_o); + STACKREFS_TO_PYOBJECTS(args, total_args, args_o); if (CONVERSION_FAILED(args_o)) { PyStackRef_CLOSE(callable); PyStackRef_CLOSE(self_or_null[0]); @@ -2649,11 +2645,9 @@ // _INIT_CALL_PY_EXACT_ARGS args = &stack_pointer[-oparg]; { - PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable); int has_self = !PyStackRef_IsNull(self_or_null[0]); STAT_INC(CALL, hit); - PyFunctionObject *func = (PyFunctionObject *)callable_o; - new_frame = _PyFrame_PushUnchecked(tstate, func, oparg + has_self, frame); + new_frame = _PyFrame_PushUnchecked(tstate, callable, oparg + has_self, frame); _PyStackRef *first_non_self_local = new_frame->localsplus + has_self; new_frame->localsplus[0] = self_or_null[0]; for (int i = 0; i < oparg; i++) { @@ -2726,7 +2720,7 @@ int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable_o))->co_flags; PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable_o)); new_frame = _PyEvalFramePushAndInit( - tstate, (PyFunctionObject *)PyStackRef_AsPyObjectSteal(callable), locals, + tstate, callable, locals, args, total_args, NULL, frame ); // The frame has stolen all the arguments from the stack, @@ -3274,8 +3268,9 @@ INSTRUCTION_STATS(COPY_FREE_VARS); /* Copy closure variables to free variables */ PyCodeObject *co = _PyFrame_GetCode(frame); - assert(PyFunction_Check(frame->f_funcobj)); - PyObject *closure = ((PyFunctionObject *)frame->f_funcobj)->func_closure; + assert(PyStackRef_FunctionCheck(frame->f_funcobj)); + PyFunctionObject *func = (PyFunctionObject *)PyStackRef_AsPyObjectBorrow(frame->f_funcobj); + PyObject *closure = func->func_closure; assert(oparg == co->co_nfreevars); int offset = co->co_nlocalsplus - oparg; for (int i = 0; i < oparg; ++i) { @@ -4102,7 +4097,7 @@ int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable_o))->co_flags; PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable_o)); _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit( - tstate, (PyFunctionObject *)PyStackRef_AsPyObjectSteal(callable), locals, + tstate, callable, locals, args, total_args, NULL, frame ); // Manipulate stack directly since we leave using DISPATCH_INLINED(). @@ -4997,8 +4992,8 @@ DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), LOAD_ATTR); STAT_INC(LOAD_ATTR, hit); PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 1); - Py_INCREF(f); - _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, f, 2, frame); + _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked( + tstate, PyStackRef_FromPyObjectNew(f), 2, frame); // Manipulate stack directly because we exit with DISPATCH_INLINED(). 
STACK_SHRINK(1); new_frame->localsplus[0] = owner; @@ -5328,8 +5323,7 @@ DEOPT_IF(code->co_argcount != 1, LOAD_ATTR); DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), LOAD_ATTR); STAT_INC(LOAD_ATTR, hit); - Py_INCREF(fget); - new_frame = _PyFrame_PushUnchecked(tstate, f, 1, frame); + new_frame = _PyFrame_PushUnchecked(tstate, PyStackRef_FromPyObjectNew(fget), 1, frame); new_frame->localsplus[0] = owner; } // _SAVE_RETURN_OFFSET @@ -6504,8 +6498,8 @@ next_instr += 1; INSTRUCTION_STATS(RETURN_GENERATOR); _PyStackRef res; - assert(PyFunction_Check(frame->f_funcobj)); - PyFunctionObject *func = (PyFunctionObject *)frame->f_funcobj; + assert(PyStackRef_FunctionCheck(frame->f_funcobj)); + PyFunctionObject *func = (PyFunctionObject *)PyStackRef_AsPyObjectBorrow(frame->f_funcobj); PyGenObject *gen = (PyGenObject *)_Py_MakeCoro(func); if (gen == NULL) { goto error; @@ -7718,8 +7712,8 @@ Py_ssize_t nargs = PyTuple_GET_SIZE(callargs); int code_flags = ((PyCodeObject *)PyFunction_GET_CODE(func))->co_flags; PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(func)); - _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit_Ex(tstate, - (PyFunctionObject *)PyStackRef_AsPyObjectSteal(func_st), locals, + _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit_Ex( + tstate, func_st, locals, nargs, callargs, kwargs, frame); // Need to manually shrink the stack since we exit with DISPATCH_INLINED. STACK_SHRINK(oparg + 3); diff --git a/Python/initconfig.c b/Python/initconfig.c index d93244f7f41084..58ac5e7d7eaeff 100644 --- a/Python/initconfig.c +++ b/Python/initconfig.c @@ -150,7 +150,7 @@ static const PyConfigSpec PYCONFIG_SPEC[] = { SPEC(orig_argv, WSTR_LIST, READ_ONLY, SYS_ATTR("orig_argv")), SPEC(parse_argv, BOOL, READ_ONLY, NO_SYS), SPEC(pathconfig_warnings, BOOL, READ_ONLY, NO_SYS), - SPEC(perf_profiling, BOOL, READ_ONLY, NO_SYS), + SPEC(perf_profiling, UINT, READ_ONLY, NO_SYS), SPEC(program_name, WSTR, READ_ONLY, NO_SYS), SPEC(run_command, WSTR_OPT, READ_ONLY, NO_SYS), SPEC(run_filename, WSTR_OPT, READ_ONLY, NO_SYS), diff --git a/Python/optimizer.c b/Python/optimizer.c index 9198e410627dd4..978649faa04d45 100644 --- a/Python/optimizer.c +++ b/Python/optimizer.c @@ -533,7 +533,7 @@ translate_bytecode_to_trace( { bool first = true; PyCodeObject *code = _PyFrame_GetCode(frame); - PyFunctionObject *func = (PyFunctionObject *)frame->f_funcobj; + PyFunctionObject *func = _PyFrame_GetFunction(frame); assert(PyFunction_Check(func)); PyCodeObject *initial_code = code; _Py_BloomFilter_Add(dependencies, initial_code); @@ -565,6 +565,7 @@ translate_bytecode_to_trace( code->co_firstlineno, 2 * INSTR_IP(initial_instr, code)); ADD_TO_TRACE(_START_EXECUTOR, 0, (uintptr_t)instr, INSTR_IP(instr, code)); + ADD_TO_TRACE(_MAKE_WARM, 0, 0, 0); uint32_t target = 0; for (;;) { @@ -1194,6 +1195,9 @@ make_executor_from_uops(_PyUOpInstruction *buffer, int length, const _PyBloomFil executor->jit_code = NULL; executor->jit_side_entry = NULL; executor->jit_size = 0; + // This is initialized to true so we can prevent the executor + // from being immediately detected as cold and invalidated. 
+ executor->vm_data.warm = true; if (_PyJIT_Compile(executor, executor->trace, length)) { Py_DECREF(executor); return NULL; @@ -1659,4 +1663,42 @@ _Py_Executors_InvalidateAll(PyInterpreterState *interp, int is_invalidation) } } +void +_Py_Executors_InvalidateCold(PyInterpreterState *interp) +{ + /* Walk the list of executors */ + /* TO DO -- Use a tree to avoid traversing as many objects */ + PyObject *invalidate = PyList_New(0); + if (invalidate == NULL) { + goto error; + } + + /* Clearing an executor can deallocate others, so we need to make a list of + * executors to invalidate first */ + for (_PyExecutorObject *exec = interp->executor_list_head; exec != NULL;) { + assert(exec->vm_data.valid); + _PyExecutorObject *next = exec->vm_data.links.next; + + if (!exec->vm_data.warm && PyList_Append(invalidate, (PyObject *)exec) < 0) { + goto error; + } + else { + exec->vm_data.warm = false; + } + + exec = next; + } + for (Py_ssize_t i = 0; i < PyList_GET_SIZE(invalidate); i++) { + _PyExecutorObject *exec = (_PyExecutorObject *)PyList_GET_ITEM(invalidate, i); + executor_clear(exec); + } + Py_DECREF(invalidate); + return; +error: + PyErr_Clear(); + Py_XDECREF(invalidate); + // If we're truly out of memory, wiping out everything is a fine fallback + _Py_Executors_InvalidateAll(interp, 0); +} + #endif /* _Py_TIER2 */ diff --git a/Python/optimizer_analysis.c b/Python/optimizer_analysis.c index f7adb44c9e09ef..b202b58a8b7214 100644 --- a/Python/optimizer_analysis.c +++ b/Python/optimizer_analysis.c @@ -145,7 +145,7 @@ remove_globals(_PyInterpreterFrame *frame, _PyUOpInstruction *buffer, return 1; } PyObject *globals = frame->f_globals; - PyFunctionObject *function = (PyFunctionObject *)frame->f_funcobj; + PyFunctionObject *function = _PyFrame_GetFunction(frame); assert(PyFunction_Check(function)); assert(function->func_builtins == builtins); assert(function->func_globals == globals); diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h index a6cfa271ae6758..4d172e3c762704 100644 --- a/Python/optimizer_cases.c.h +++ b/Python/optimizer_cases.c.h @@ -2381,6 +2381,10 @@ break; } + case _MAKE_WARM: { + break; + } + case _FATAL_ERROR: { break; } diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index 27faf723745c21..8aebbe5c405ffe 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -2503,18 +2503,12 @@ finalize_subinterpreters(void) static PyStatus add_main_module(PyInterpreterState *interp) { - PyObject *m, *d, *ann_dict; + PyObject *m, *d; m = PyImport_AddModuleObject(&_Py_ID(__main__)); if (m == NULL) return _PyStatus_ERR("can't create __main__ module"); d = PyModule_GetDict(m); - ann_dict = PyDict_New(); - if ((ann_dict == NULL) || - (PyDict_SetItemString(d, "__annotations__", ann_dict) < 0)) { - return _PyStatus_ERR("Failed to initialize __main__.__annotations__"); - } - Py_DECREF(ann_dict); int has_builtins = PyDict_ContainsString(d, "__builtins__"); if (has_builtins < 0) { diff --git a/Python/pystate.c b/Python/pystate.c index 6bf7ebeb75ff73..6b85e5a64fefcf 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -660,6 +660,7 @@ init_interpreter(PyInterpreterState *interp, #ifdef _Py_TIER2 (void)_Py_SetOptimizer(interp, NULL); interp->executor_list_head = NULL; + interp->trace_run_counter = JIT_CLEANUP_THRESHOLD; #endif if (interp != &runtime->_main_interpreter) { /* Fix the self-referential, statically initialized fields. 
*/ diff --git a/Python/sysmodule.c b/Python/sysmodule.c index 887a916563a2e1..ac343a8048e008 100644 --- a/Python/sysmodule.c +++ b/Python/sysmodule.c @@ -2384,10 +2384,11 @@ sys__getframemodulename_impl(PyObject *module, int depth) while (f && (_PyFrame_IsIncomplete(f) || depth-- > 0)) { f = f->previous; } - if (f == NULL || f->f_funcobj == NULL) { + if (f == NULL || PyStackRef_IsNull(f->f_funcobj)) { Py_RETURN_NONE; } - PyObject *r = PyFunction_GetModule(f->f_funcobj); + PyObject *func = PyStackRef_AsPyObjectBorrow(f->f_funcobj); + PyObject *r = PyFunction_GetModule(func); if (!r) { PyErr_Clear(); r = Py_None; diff --git a/Tools/build/generate_global_objects.py b/Tools/build/generate_global_objects.py index 882918fafb1edd..b5b6de0e7dc2dc 100644 --- a/Tools/build/generate_global_objects.py +++ b/Tools/build/generate_global_objects.py @@ -433,7 +433,7 @@ def get_identifiers_and_strings() -> 'tuple[set[str], dict[str, str]]': # Give a nice message for common mistakes. # To cover tricky cases (like "\n") we also generate C asserts. raise ValueError( - 'do not use &_PyID or &_Py_STR for one-character latin-1 ' + 'do not use &_Py_ID or &_Py_STR for one-character latin-1 ' + f'strings, use _Py_LATIN1_CHR instead: {string!r}') if string not in strings: strings[string] = name @@ -442,7 +442,7 @@ def get_identifiers_and_strings() -> 'tuple[set[str], dict[str, str]]': overlap = identifiers & set(strings.keys()) if overlap: raise ValueError( - 'do not use both _PyID and _Py_DECLARE_STR for the same string: ' + 'do not use both _Py_ID and _Py_DECLARE_STR for the same string: ' + repr(overlap)) return identifiers, strings diff --git a/Tools/c-analyzer/cpython/globals-to-fix.tsv b/Tools/c-analyzer/cpython/globals-to-fix.tsv index e1c07f88b963bc..a0be2a0a203f8c 100644 --- a/Tools/c-analyzer/cpython/globals-to-fix.tsv +++ b/Tools/c-analyzer/cpython/globals-to-fix.tsv @@ -423,6 +423,7 @@ Modules/readline.c - libedit_history_start - Modules/_ctypes/cfield.c - formattable - Modules/_ctypes/malloc_closure.c - free_list - +Modules/_cursesmodule.c - curses_global_state - Modules/_curses_panel.c - lop - Modules/_ssl/debughelpers.c _PySSL_keylog_callback lock - Modules/_tkinter.c - quitMainLoop - diff --git a/Tools/c-analyzer/cpython/ignored.tsv b/Tools/c-analyzer/cpython/ignored.tsv index f4dc807198a8ef..e6c599a2ac4a46 100644 --- a/Tools/c-analyzer/cpython/ignored.tsv +++ b/Tools/c-analyzer/cpython/ignored.tsv @@ -345,6 +345,7 @@ Python/ast_opt.c fold_unaryop ops - Python/ceval.c - _PyEval_BinaryOps - Python/ceval.c - _Py_INTERPRETER_TRAMPOLINE_INSTRUCTIONS - Python/codecs.c - Py_hexdigits - +Python/codecs.c - codecs_builtin_error_handlers - Python/codecs.c - ucnhash_capi - Python/codecs.c _PyCodec_InitRegistry methods - Python/compile.c - NO_LOCATION - diff --git a/Tools/cases_generator/analyzer.py b/Tools/cases_generator/analyzer.py index 3cc36b6b5841bd..a4ce207703edcd 100644 --- a/Tools/cases_generator/analyzer.py +++ b/Tools/cases_generator/analyzer.py @@ -248,6 +248,7 @@ class PseudoInstruction: name: str stack: StackEffect targets: list[Instruction] + as_sequence: bool flags: list[str] opcode: int = -1 @@ -365,12 +366,24 @@ def find_assignment_target(idx: int) -> list[lexer.Token]: offset += 1 return [] + def in_frame_push(idx: int) -> bool: + for tkn in reversed(node.block.tokens[: idx - 1]): + if tkn.kind == "SEMI" or tkn.kind == "LBRACE" or tkn.kind == "RBRACE": + return False + if tkn.kind == "IDENTIFIER" and tkn.text == "_PyFrame_PushUnchecked": + return True + return False + refs: dict[lexer.Token, 
str | None] = {} for idx, tkn in enumerate(node.block.tokens): if tkn.kind != "IDENTIFIER" or tkn.text != "PyStackRef_FromPyObjectNew": continue if idx == 0 or node.block.tokens[idx - 1].kind != "EQUALS": + if in_frame_push(idx): + # PyStackRef_FromPyObjectNew() is called in _PyFrame_PushUnchecked() + refs[tkn] = None + continue raise analysis_error("Expected '=' before PyStackRef_FromPyObjectNew", tkn) lhs = find_assignment_target(idx) @@ -527,6 +540,7 @@ def has_error_without_pop(op: parser.InstDef) -> bool: "_PyList_FromStackRefSteal", "_PyTuple_FromArraySteal", "_PyTuple_FromStackRefSteal", + "_Py_set_eval_breaker_bit" ) ESCAPING_FUNCTIONS = ( @@ -840,6 +854,7 @@ def add_pseudo( pseudo.name, analyze_stack(pseudo), [instructions[target] for target in pseudo.targets], + pseudo.as_sequence, pseudo.flags, ) diff --git a/Tools/cases_generator/generators_common.py b/Tools/cases_generator/generators_common.py index 2f8fccec2ea409..4cfd4ad3d05988 100644 --- a/Tools/cases_generator/generators_common.py +++ b/Tools/cases_generator/generators_common.py @@ -200,15 +200,16 @@ def py_stack_ref_from_py_object_new( stack: Stack, inst: Instruction | None, ) -> None: - self.out.emit(tkn) - emit_to(self.out, tkn_iter, "SEMI") - self.out.emit(";\n") - target = uop.deferred_refs[tkn] if target is None: # An assignment we don't handle, such as to a pointer or array. + self.out.emit(tkn) return + self.out.emit(tkn) + emit_to(self.out, tkn_iter, "SEMI") + self.out.emit(";\n") + # Flush the assignment to the stack. Note that we don't flush the # stack pointer here, and instead are currently relying on initializing # unused portions of the stack to NULL. diff --git a/Tools/cases_generator/opcode_metadata_generator.py b/Tools/cases_generator/opcode_metadata_generator.py index 9b1bc98b5c08d7..2ad7604af9cc0d 100644 --- a/Tools/cases_generator/opcode_metadata_generator.py +++ b/Tools/cases_generator/opcode_metadata_generator.py @@ -305,6 +305,7 @@ def generate_pseudo_targets(analysis: Analysis, out: CWriter) -> None: table_size = len(analysis.pseudos) max_targets = max(len(pseudo.targets) for pseudo in analysis.pseudos.values()) out.emit("struct pseudo_targets {\n") + out.emit(f"uint8_t as_sequence;\n") out.emit(f"uint8_t targets[{max_targets + 1}];\n") out.emit("};\n") out.emit( @@ -315,10 +316,11 @@ def generate_pseudo_targets(analysis: Analysis, out: CWriter) -> None: f"const struct pseudo_targets _PyOpcode_PseudoTargets[{table_size}] = {{\n" ) for pseudo in analysis.pseudos.values(): + as_sequence = "1" if pseudo.as_sequence else "0" targets = ["0"] * (max_targets + 1) for i, target in enumerate(pseudo.targets): targets[i] = target.name - out.emit(f"[{pseudo.name}-256] = {{ {{ {', '.join(targets)} }} }},\n") + out.emit(f"[{pseudo.name}-256] = {{ {as_sequence}, {{ {', '.join(targets)} }} }},\n") out.emit("};\n\n") out.emit("#endif // NEED_OPCODE_METADATA\n") out.emit("static inline bool\n") diff --git a/Tools/cases_generator/parsing.py b/Tools/cases_generator/parsing.py index ab5444d41ac6a9..de31d9b232f9df 100644 --- a/Tools/cases_generator/parsing.py +++ b/Tools/cases_generator/parsing.py @@ -148,6 +148,7 @@ class Pseudo(Node): outputs: list[OutputEffect] flags: list[str] # instr flags to set on the pseudo instruction targets: list[str] # opcodes this can be replaced by + as_sequence: bool AstNode = InstDef | Macro | Pseudo | Family @@ -423,16 +424,22 @@ def pseudo_def(self) -> Pseudo | None: flags = [] if self.expect(lx.RPAREN): if self.expect(lx.EQUALS): - if not self.expect(lx.LBRACE): - raise 
self.make_syntax_error("Expected {") - if members := self.members(): - if self.expect(lx.RBRACE) and self.expect(lx.SEMI): + if self.expect(lx.LBRACE): + as_sequence = False + closing = lx.RBRACE + elif self.expect(lx.LBRACKET): + as_sequence = True + closing = lx.RBRACKET + else: + raise self.make_syntax_error("Expected { or [") + if members := self.members(allow_sequence=True): + if self.expect(closing) and self.expect(lx.SEMI): return Pseudo( - tkn.text, inp, outp, flags, members + tkn.text, inp, outp, flags, members, as_sequence ) return None - def members(self) -> list[str] | None: + def members(self, allow_sequence : bool=False) -> list[str] | None: here = self.getpos() if tkn := self.expect(lx.IDENTIFIER): members = [tkn.text] @@ -442,8 +449,10 @@ def members(self) -> list[str] | None: else: break peek = self.peek() - if not peek or peek.kind != lx.RBRACE: - raise self.make_syntax_error("Expected comma or right paren") + kinds = [lx.RBRACE, lx.RBRACKET] if allow_sequence else [lx.RBRACE] + if not peek or peek.kind not in kinds: + raise self.make_syntax_error( + f"Expected comma or right paren{'/bracket' if allow_sequence else ''}") return members self.setpos(here) return None diff --git a/Tools/jit/_targets.py b/Tools/jit/_targets.py index e37ee943999785..6c7b48f1f37865 100644 --- a/Tools/jit/_targets.py +++ b/Tools/jit/_targets.py @@ -139,6 +139,9 @@ async def _compile( "-fno-plt", # Don't call stack-smashing canaries that we can't find or patch: "-fno-stack-protector", + # On aarch64 Linux, intrinsics were being emitted and this flag + # was required to disable them. + "-mno-outline-atomics", "-std=c11", *self.args, ] diff --git a/Tools/jit/ignore-tests-emulated-linux.txt b/Tools/jit/ignore-tests-emulated-linux.txt index dbb364673b5c1a..e379e39def0eaf 100644 --- a/Tools/jit/ignore-tests-emulated-linux.txt +++ b/Tools/jit/ignore-tests-emulated-linux.txt @@ -1,4 +1,5 @@ test_multiprocessing_fork +test_strftime_y2k test.test_asyncio.test_unix_events.TestFork.test_fork_asyncio_run test.test_asyncio.test_unix_events.TestFork.test_fork_asyncio_subprocess test.test_asyncio.test_unix_events.TestFork.test_fork_signal_handling diff --git a/Tools/msi/bundle/Default.wxl b/Tools/msi/bundle/Default.wxl index 0014204e89d1bb..49f681d3e11d2e 100644 --- a/Tools/msi/bundle/Default.wxl +++ b/Tools/msi/bundle/Default.wxl @@ -123,7 +123,7 @@ Feel free to post at <a href="https://discuss.python.org/c/users/7">discus You must restart your computer to complete the rollback of the software. &Restart Unable to install [WixBundleName] due to an existing install. Use Programs and Features to modify, repair or remove [WixBundleName]. - At least Windows 8.1 or Windows Server 2012 are required to install [WixBundleName] + At least Windows 10 or Windows Server 2016 are required to install [WixBundleName] Visit <a href="https://www.python.org/downloads/">python.org</a> to download an earlier version of Python. 
Disable path length limit diff --git a/Tools/msi/bundle/bootstrap/PythonBootstrapperApplication.cpp b/Tools/msi/bundle/bootstrap/PythonBootstrapperApplication.cpp index 094ddba4f1ad8f..6f50200dc7b251 100644 --- a/Tools/msi/bundle/bootstrap/PythonBootstrapperApplication.cpp +++ b/Tools/msi/bundle/bootstrap/PythonBootstrapperApplication.cpp @@ -3086,11 +3086,13 @@ class PythonBootstrapperApplication : public CBalBaseBootstrapperApplication { LOC_STRING *pLocString = nullptr; if (IsWindowsServer()) { - if (IsWindowsVersionOrGreater(6, 2, 0)) { - BalLog(BOOTSTRAPPER_LOG_LEVEL_STANDARD, "Target OS is Windows Server 2012 or later"); + if (IsWindowsVersionOrGreater(10, 0, 0)) { + BalLog(BOOTSTRAPPER_LOG_LEVEL_STANDARD, "Target OS is Windows Server 2016 or later"); return; + } else if (IsWindowsVersionOrGreater(6, 2, 0)) { + BalLog(BOOTSTRAPPER_LOG_LEVEL_ERROR, "Detected Windows Server 2012"); } else if (IsWindowsVersionOrGreater(6, 1, 1)) { - BalLog(BOOTSTRAPPER_LOG_LEVEL_STANDARD, "Detected Windows Server 2008 R2"); + BalLog(BOOTSTRAPPER_LOG_LEVEL_ERROR, "Detected Windows Server 2008 R2"); } else if (IsWindowsVersionOrGreater(6, 1, 0)) { BalLog(BOOTSTRAPPER_LOG_LEVEL_ERROR, "Detected Windows Server 2008 R2"); } else if (IsWindowsVersionOrGreater(6, 0, 0)) { @@ -3104,8 +3106,7 @@ class PythonBootstrapperApplication : public CBalBaseBootstrapperApplication { BalLog(BOOTSTRAPPER_LOG_LEVEL_STANDARD, "Target OS is Windows 10 or later"); return; } else if (IsWindows8Point1OrGreater()) { - BalLog(BOOTSTRAPPER_LOG_LEVEL_STANDARD, "Target OS is Windows 8.1"); - return; + BalLog(BOOTSTRAPPER_LOG_LEVEL_ERROR, "Detected Windows 8.1"); } else if (IsWindows8OrGreater()) { BalLog(BOOTSTRAPPER_LOG_LEVEL_ERROR, "Detected Windows 8"); } else if (IsWindows7OrGreater()) { diff --git a/Tools/wasm/Setup.local.example b/Tools/wasm/Setup.local.example deleted file mode 100644 index 7b2fb13f6ceef2..00000000000000 --- a/Tools/wasm/Setup.local.example +++ /dev/null @@ -1,13 +0,0 @@ -# Module/Setup.local with reduced stdlib -*disabled* -_asyncio -_bz2 -_decimal -_pickle -pyexpat _elementtree -_sha3 _blake2 -_zoneinfo -xxsubtype - -# cjk codecs -#_multibytecodec _codecs_cn _codecs_hk _codecs_iso2022 _codecs_jp _codecs_kr _codecs_tw diff --git a/Tools/wasm/build_wasi.sh b/Tools/wasm/build_wasi.sh deleted file mode 100755 index 436306222ce1d0..00000000000000 --- a/Tools/wasm/build_wasi.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/bash - -set -e -x - -# Quick check to avoid running configure just to fail in the end. -if [ -f Programs/python.o ]; then - echo "Can't do an out-of-tree build w/ an in-place build pre-existing (i.e., found Programs/python.o)" >&2 - exit 1 -fi - -if [ ! -f Modules/Setup.local ]; then - touch Modules/Setup.local -fi - -# TODO: check if `make` and `pkgconfig` are installed -# TODO: detect if wasmtime is installed - -# Create the "build" Python. -mkdir -p builddir/build -pushd builddir/build -../../configure -C -make -s -j 4 all -export PYTHON_VERSION=`./python -c 'import sys; print(f"{sys.version_info.major}.{sys.version_info.minor}")'` -popd - -# Create the "host"/WASI Python. 
-export CONFIG_SITE="$(pwd)/Tools/wasm/config.site-wasm32-wasi" -export HOSTRUNNER="wasmtime run --mapdir /::$(pwd) --env PYTHONPATH=/builddir/wasi/build/lib.wasi-wasm32-$PYTHON_VERSION $(pwd)/builddir/wasi/python.wasm --" - -mkdir -p builddir/wasi -pushd builddir/wasi -../../Tools/wasm/wasi-env \ - ../../configure \ - -C \ - --host=wasm32-unknown-wasi \ - --build=$(../../config.guess) \ - --with-build-python=../build/python -make -s -j 4 all -# Create a helper script for executing the host/WASI Python. -printf "#!/bin/sh\nexec $HOSTRUNNER \"\$@\"\n" > run_wasi.sh -chmod 755 run_wasi.sh -./run_wasi.sh --version -popd - diff --git a/Tools/wasm/wasi-env b/Tools/wasm/wasi-env index 95eda863cb62c6..4c5078a1f675e2 100755 --- a/Tools/wasm/wasi-env +++ b/Tools/wasm/wasi-env @@ -1,6 +1,8 @@ #!/bin/sh set -e +# NOTE: to be removed once no longer used in https://github.com/python/buildmaster-config/blob/main/master/custom/factories.py . + # function usage() { echo "wasi-env - Run command with WASI-SDK"