diff --git a/Doc/conf.py b/Doc/conf.py index 0d7c0b553eaa74..dc09b0b51ca84c 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -245,10 +245,12 @@ # be resolved, as the method is currently undocumented. For context, see # https://github.com/python/cpython/pull/103289. ('py:meth', '_SubParsersAction.add_parser'), - # Attributes that definitely should be documented better, + # Attributes/methods/etc. that definitely should be documented better, # but are deferred for now: ('py:attr', '__annotations__'), + ('py:meth', '__missing__'), ('py:attr', '__wrapped__'), + ('py:meth', 'index'), # list.index, tuple.index, etc. ] # gh-106948: Copy standard C types declared in the "c:type" domain to the diff --git a/Doc/library/ast.rst b/Doc/library/ast.rst index 4ebbe0e5471c88..9997edd0fb5fc0 100644 --- a/Doc/library/ast.rst +++ b/Doc/library/ast.rst @@ -2457,6 +2457,13 @@ effects on the compilation of a program: Generates and returns an abstract syntax tree instead of returning a compiled code object. +.. data:: PyCF_OPTIMIZED_AST + + The returned AST is optimized according to the *optimize* argument + in :func:`compile` or :func:`ast.parse`. + + .. versionadded:: 3.13 + .. data:: PyCF_TYPE_COMMENTS Enables support for :pep:`484` and :pep:`526` style type comments diff --git a/Doc/library/collections.abc.rst b/Doc/library/collections.abc.rst index edc078953290d7..e0c72ff9249ee7 100644 --- a/Doc/library/collections.abc.rst +++ b/Doc/library/collections.abc.rst @@ -22,7 +22,7 @@ This module provides :term:`abstract base classes ` that can be used to test whether a class provides a particular interface; for -example, whether it is :term:`hashable` or whether it is a mapping. +example, whether it is :term:`hashable` or whether it is a :term:`mapping`. An :func:`issubclass` or :func:`isinstance` test for an interface works in one of three ways. @@ -73,7 +73,7 @@ of the API: >>> isinstance(D(), Sequence) True -In this example, class :class:`D` does not need to define +In this example, class :class:`!D` does not need to define ``__contains__``, ``__iter__``, and ``__reversed__`` because the :ref:`in-operator `, the :term:`iteration ` logic, and the :func:`reversed` function automatically fall back to @@ -183,14 +183,14 @@ ABC Inherits from Abstract Methods Mi .. rubric:: Footnotes -.. [1] These ABCs override :meth:`object.__subclasshook__` to support +.. [1] These ABCs override :meth:`~abc.ABCMeta.__subclasshook__` to support testing an interface by verifying the required methods are present and have not been set to :const:`None`. This only works for simple interfaces. More complex interfaces require registration or direct subclassing. .. [2] Checking ``isinstance(obj, Iterable)`` detects classes that are - registered as :class:`Iterable` or that have an :meth:`__iter__` + registered as :class:`Iterable` or that have an :meth:`~container.__iter__` method, but it does not detect classes that iterate with the :meth:`~object.__getitem__` method. The only reliable way to determine whether an object is :term:`iterable` is to call ``iter(obj)``. @@ -202,26 +202,27 @@ Collections Abstract Base Classes -- Detailed Descriptions .. class:: Container - ABC for classes that provide the :meth:`__contains__` method. + ABC for classes that provide the :meth:`~object.__contains__` method. .. class:: Hashable - ABC for classes that provide the :meth:`__hash__` method. + ABC for classes that provide the :meth:`~object.__hash__` method. .. class:: Sized - ABC for classes that provide the :meth:`__len__` method. 
+ ABC for classes that provide the :meth:`~object.__len__` method. .. class:: Callable - ABC for classes that provide the :meth:`__call__` method. + ABC for classes that provide the :meth:`~object.__call__` method. .. class:: Iterable - ABC for classes that provide the :meth:`__iter__` method. + ABC for classes that provide the :meth:`~container.__iter__` method. Checking ``isinstance(obj, Iterable)`` detects classes that are registered - as :class:`Iterable` or that have an :meth:`__iter__` method, but it does + as :class:`Iterable` or that have an :meth:`~container.__iter__` method, + but it does not detect classes that iterate with the :meth:`~object.__getitem__` method. The only reliable way to determine whether an object is :term:`iterable` is to call ``iter(obj)``. @@ -240,17 +241,17 @@ Collections Abstract Base Classes -- Detailed Descriptions .. class:: Reversible - ABC for iterable classes that also provide the :meth:`__reversed__` + ABC for iterable classes that also provide the :meth:`~object.__reversed__` method. .. versionadded:: 3.6 .. class:: Generator - ABC for generator classes that implement the protocol defined in - :pep:`342` that extends iterators with the :meth:`~generator.send`, + ABC for :term:`generator` classes that implement the protocol defined in + :pep:`342` that extends :term:`iterators ` with the + :meth:`~generator.send`, :meth:`~generator.throw` and :meth:`~generator.close` methods. - See also the definition of :term:`generator`. .. versionadded:: 3.5 @@ -261,7 +262,7 @@ Collections Abstract Base Classes -- Detailed Descriptions ABCs for read-only and mutable :term:`sequences `. Implementation note: Some of the mixin methods, such as - :meth:`__iter__`, :meth:`__reversed__` and :meth:`index`, make + :meth:`~container.__iter__`, :meth:`~object.__reversed__` and :meth:`index`, make repeated calls to the underlying :meth:`~object.__getitem__` method. Consequently, if :meth:`~object.__getitem__` is implemented with constant access speed, the mixin methods will have linear performance; @@ -282,7 +283,7 @@ Collections Abstract Base Classes -- Detailed Descriptions .. class:: Set MutableSet - ABCs for read-only and mutable sets. + ABCs for read-only and mutable :ref:`sets `. .. class:: Mapping MutableMapping @@ -299,16 +300,16 @@ Collections Abstract Base Classes -- Detailed Descriptions .. class:: Awaitable ABC for :term:`awaitable` objects, which can be used in :keyword:`await` - expressions. Custom implementations must provide the :meth:`__await__` - method. + expressions. Custom implementations must provide the + :meth:`~object.__await__` method. :term:`Coroutine ` objects and instances of the :class:`~collections.abc.Coroutine` ABC are all instances of this ABC. .. note:: - In CPython, generator-based coroutines (generators decorated with - :func:`types.coroutine`) are - *awaitables*, even though they do not have an :meth:`__await__` method. + In CPython, generator-based coroutines (:term:`generators ` + decorated with :func:`@types.coroutine `) are + *awaitables*, even though they do not have an :meth:`~object.__await__` method. Using ``isinstance(gencoro, Awaitable)`` for them will return ``False``. Use :func:`inspect.isawaitable` to detect them. @@ -316,17 +317,17 @@ Collections Abstract Base Classes -- Detailed Descriptions .. class:: Coroutine - ABC for coroutine compatible classes. These implement the + ABC for :term:`coroutine` compatible classes. 
These implement the following methods, defined in :ref:`coroutine-objects`: :meth:`~coroutine.send`, :meth:`~coroutine.throw`, and :meth:`~coroutine.close`. Custom implementations must also implement - :meth:`__await__`. All :class:`Coroutine` instances are also instances of - :class:`Awaitable`. See also the definition of :term:`coroutine`. + :meth:`~object.__await__`. All :class:`Coroutine` instances are also + instances of :class:`Awaitable`. .. note:: - In CPython, generator-based coroutines (generators decorated with - :func:`types.coroutine`) are - *awaitables*, even though they do not have an :meth:`__await__` method. + In CPython, generator-based coroutines (:term:`generators ` + decorated with :func:`@types.coroutine `) are + *awaitables*, even though they do not have an :meth:`~object.__await__` method. Using ``isinstance(gencoro, Coroutine)`` for them will return ``False``. Use :func:`inspect.isawaitable` to detect them. @@ -334,7 +335,7 @@ Collections Abstract Base Classes -- Detailed Descriptions .. class:: AsyncIterable - ABC for classes that provide ``__aiter__`` method. See also the + ABC for classes that provide an ``__aiter__`` method. See also the definition of :term:`asynchronous iterable`. .. versionadded:: 3.5 @@ -348,7 +349,7 @@ Collections Abstract Base Classes -- Detailed Descriptions .. class:: AsyncGenerator - ABC for asynchronous generator classes that implement the protocol + ABC for :term:`asynchronous generator` classes that implement the protocol defined in :pep:`525` and :pep:`492`. .. versionadded:: 3.6 @@ -373,9 +374,9 @@ particular functionality, for example:: Several of the ABCs are also useful as mixins that make it easier to develop classes supporting container APIs. For example, to write a class supporting the full :class:`Set` API, it is only necessary to supply the three underlying -abstract methods: :meth:`__contains__`, :meth:`__iter__`, and :meth:`__len__`. -The ABC supplies the remaining methods such as :meth:`__and__` and -:meth:`isdisjoint`:: +abstract methods: :meth:`~object.__contains__`, :meth:`~container.__iter__`, and +:meth:`~object.__len__`. The ABC supplies the remaining methods such as +:meth:`!__and__` and :meth:`~frozenset.isdisjoint`:: class ListBasedSet(collections.abc.Set): ''' Alternate set implementation favoring space over speed @@ -403,23 +404,24 @@ Notes on using :class:`Set` and :class:`MutableSet` as a mixin: (1) Since some set operations create new sets, the default mixin methods need - a way to create new instances from an iterable. The class constructor is + a way to create new instances from an :term:`iterable`. The class constructor is assumed to have a signature in the form ``ClassName(iterable)``. - That assumption is factored-out to an internal classmethod called - :meth:`_from_iterable` which calls ``cls(iterable)`` to produce a new set. + That assumption is factored-out to an internal :class:`classmethod` called + :meth:`!_from_iterable` which calls ``cls(iterable)`` to produce a new set. If the :class:`Set` mixin is being used in a class with a different - constructor signature, you will need to override :meth:`_from_iterable` + constructor signature, you will need to override :meth:`!_from_iterable` with a classmethod or regular method that can construct new instances from an iterable argument. 
(2) To override the comparisons (presumably for speed, as the - semantics are fixed), redefine :meth:`__le__` and :meth:`__ge__`, + semantics are fixed), redefine :meth:`~object.__le__` and + :meth:`~object.__ge__`, then the other operations will automatically follow suit. (3) - The :class:`Set` mixin provides a :meth:`_hash` method to compute a hash value - for the set; however, :meth:`__hash__` is not defined because not all sets + The :class:`Set` mixin provides a :meth:`!_hash` method to compute a hash value + for the set; however, :meth:`~object.__hash__` is not defined because not all sets are :term:`hashable` or immutable. To add set hashability using mixins, inherit from both :meth:`Set` and :meth:`Hashable`, then define ``__hash__ = Set._hash``. diff --git a/Doc/library/csv.rst b/Doc/library/csv.rst index 4d52254e6d6db5..7a5589e68b3052 100644 --- a/Doc/library/csv.rst +++ b/Doc/library/csv.rst @@ -55,10 +55,11 @@ The :mod:`csv` module defines the following functions: .. function:: reader(csvfile, dialect='excel', **fmtparams) - Return a reader object which will iterate over lines in the given *csvfile*. - *csvfile* can be any object which supports the :term:`iterator` protocol and returns a - string each time its :meth:`!__next__` method is called --- :term:`file objects - ` and list objects are both suitable. If *csvfile* is a file object, + Return a :ref:`reader object ` that will process + lines from the given *csvfile*. A csvfile must be an iterable of + strings, each in the reader's defined csv format. + A csvfile is most commonly a file-like object or list. + If *csvfile* is a file object, it should be opened with ``newline=''``. [1]_ An optional *dialect* parameter can be given which is used to define a set of parameters specific to a particular CSV dialect. It may be an instance of a subclass of @@ -449,6 +450,8 @@ Dialects support the following attributes: When ``True``, raise exception :exc:`Error` on bad CSV input. The default is ``False``. +.. _reader-objects: + Reader Objects -------------- diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst index 1bab1fb9bd9464..686b37754368c1 100644 --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -1985,7 +1985,8 @@ Examples of working with a :class:`.time` object:: American EST and EDT. Special requirement for pickling: A :class:`tzinfo` subclass must have an - :meth:`__init__` method that can be called with no arguments, otherwise it can be + :meth:`~object.__init__` method that can be called with no arguments, + otherwise it can be pickled but possibly not unpickled again. This is a technical requirement that may be relaxed in the future. diff --git a/Doc/library/email.utils.rst b/Doc/library/email.utils.rst index 345b64001c1ace..d693a9bc3933b5 100644 --- a/Doc/library/email.utils.rst +++ b/Doc/library/email.utils.rst @@ -58,13 +58,18 @@ of the new API. begins with angle brackets, they are stripped off. -.. function:: parseaddr(address) +.. function:: parseaddr(address, *, strict=True) Parse address -- which should be the value of some address-containing field such as :mailheader:`To` or :mailheader:`Cc` -- into its constituent *realname* and *email address* parts. Returns a tuple of that information, unless the parse fails, in which case a 2-tuple of ``('', '')`` is returned. + If *strict* is true, use a strict parser which rejects malformed inputs. + + .. versionchanged:: 3.13 + Add *strict* optional parameter and reject malformed inputs by default. + .. 
function:: formataddr(pair, charset='utf-8') @@ -82,12 +87,15 @@ of the new API. Added the *charset* option. -.. function:: getaddresses(fieldvalues) +.. function:: getaddresses(fieldvalues, *, strict=True) This method returns a list of 2-tuples of the form returned by ``parseaddr()``. *fieldvalues* is a sequence of header field values as might be returned by - :meth:`Message.get_all `. Here's a simple - example that gets all the recipients of a message:: + :meth:`Message.get_all `. + + If *strict* is true, use a strict parser which rejects malformed inputs. + + Here's a simple example that gets all the recipients of a message:: from email.utils import getaddresses @@ -97,6 +105,9 @@ of the new API. resent_ccs = msg.get_all('resent-cc', []) all_recipients = getaddresses(tos + ccs + resent_tos + resent_ccs) + .. versionchanged:: 3.13 + Add *strict* optional parameter and reject malformed inputs by default. + .. function:: parsedate(date) diff --git a/Doc/library/exceptions.rst b/Doc/library/exceptions.rst index f7891f2732bdb1..f821776c286133 100644 --- a/Doc/library/exceptions.rst +++ b/Doc/library/exceptions.rst @@ -1009,9 +1009,9 @@ their subgroups based on the types of the contained exceptions. True - Note that :exc:`BaseExceptionGroup` defines :meth:`__new__`, so + Note that :exc:`BaseExceptionGroup` defines :meth:`~object.__new__`, so subclasses that need a different constructor signature need to - override that rather than :meth:`__init__`. For example, the following + override that rather than :meth:`~object.__init__`. For example, the following defines an exception group subclass which accepts an exit_code and and constructs the group's message from it. :: diff --git a/Doc/library/fractions.rst b/Doc/library/fractions.rst index 509c63686f5a7f..887c3844d20faa 100644 --- a/Doc/library/fractions.rst +++ b/Doc/library/fractions.rst @@ -106,6 +106,10 @@ another rational number, or from a string. presentation types ``"e"``, ``"E"``, ``"f"``, ``"F"``, ``"g"``, ``"G"`` and ``"%""``. + .. versionchanged:: 3.13 + Formatting of :class:`Fraction` instances without a presentation type + now supports fill, alignment, sign handling, minimum width and grouping. + .. attribute:: numerator Numerator of the Fraction in lowest term. @@ -201,17 +205,36 @@ another rational number, or from a string. .. method:: __format__(format_spec, /) - Provides support for float-style formatting of :class:`Fraction` - instances via the :meth:`str.format` method, the :func:`format` built-in - function, or :ref:`Formatted string literals `. The - presentation types ``"e"``, ``"E"``, ``"f"``, ``"F"``, ``"g"``, ``"G"`` - and ``"%"`` are supported. For these presentation types, formatting for a - :class:`Fraction` object ``x`` follows the rules outlined for - the :class:`float` type in the :ref:`formatspec` section. + Provides support for formatting of :class:`Fraction` instances via the + :meth:`str.format` method, the :func:`format` built-in function, or + :ref:`Formatted string literals `. + + If the ``format_spec`` format specification string does not end with one + of the presentation types ``'e'``, ``'E'``, ``'f'``, ``'F'``, ``'g'``, + ``'G'`` or ``'%'`` then formatting follows the general rules for fill, + alignment, sign handling, minimum width, and grouping as described in the + :ref:`format specification mini-language `. The "alternate + form" flag ``'#'`` is supported: if present, it forces the output string + to always include an explicit denominator, even when the value being + formatted is an exact integer. 
The zero-fill flag ``'0'`` is not + supported. + + If the ``format_spec`` format specification string ends with one of + the presentation types ``'e'``, ``'E'``, ``'f'``, ``'F'``, ``'g'``, + ``'G'`` or ``'%'`` then formatting follows the rules outlined for the + :class:`float` type in the :ref:`formatspec` section. Here are some examples:: >>> from fractions import Fraction + >>> format(Fraction(103993, 33102), '_') + '103_993/33_102' + >>> format(Fraction(1, 7), '.^+10') + '...+1/7...' + >>> format(Fraction(3, 1), '') + '3' + >>> format(Fraction(3, 1), '#') + '3/1' >>> format(Fraction(1, 7), '.40g') '0.1428571428571428571428571428571428571429' >>> format(Fraction('1234567.855'), '_.2f') diff --git a/Doc/library/itertools.rst b/Doc/library/itertools.rst index 56c66f670c74dd..c016fb76bfd0a0 100644 --- a/Doc/library/itertools.rst +++ b/Doc/library/itertools.rst @@ -164,11 +164,14 @@ loops that truncate the stream. Added the optional *initial* parameter. -.. function:: batched(iterable, n) +.. function:: batched(iterable, n, *, strict=False) Batch data from the *iterable* into tuples of length *n*. The last batch may be shorter than *n*. + If *strict* is true, will raise a :exc:`ValueError` if the final + batch is shorter than *n*. + Loops over the input iterable and accumulates data into tuples up to size *n*. The input is consumed lazily, just enough to fill a batch. The result is yielded as soon as the batch is full or when the input @@ -190,16 +193,21 @@ loops that truncate the stream. Roughly equivalent to:: - def batched(iterable, n): + def batched(iterable, n, *, strict=False): # batched('ABCDEFG', 3) --> ABC DEF G if n < 1: raise ValueError('n must be at least one') it = iter(iterable) while batch := tuple(islice(it, n)): + if strict and len(batch) != n: + raise ValueError('batched(): incomplete batch') yield batch .. versionadded:: 3.12 + .. versionchanged:: 3.13 + Added the *strict* option. + .. function:: chain(*iterables) @@ -1017,6 +1025,8 @@ which incur interpreter overhead. "List unique elements, preserving order. Remember only the element just seen." # unique_justseen('AAAABBBCCDAABBB') --> A B C D A B # unique_justseen('ABBcCAD', str.lower) --> A B c A D + if key is None: + return map(operator.itemgetter(0), groupby(iterable)) return map(next, map(operator.itemgetter(1), groupby(iterable, key))) @@ -1034,10 +1044,15 @@ The following recipes have a more mathematical flavor: # sum_of_squares([10, 20, 30]) -> 1400 return math.sumprod(*tee(it)) - def transpose(it): - "Swap the rows and columns of the input." + def reshape(matrix, cols): + "Reshape a 2-D matrix to have a given number of columns." + # reshape([(0, 1), (2, 3), (4, 5)], 3) --> (0, 1, 2), (3, 4, 5) + return batched(chain.from_iterable(matrix), cols, strict=True) + + def transpose(matrix): + "Swap the rows and columns of a 2-D matrix." # transpose([(1, 2, 3), (11, 22, 33)]) --> (1, 11) (2, 22) (3, 33) - return zip(*it, strict=True) + return zip(*matrix, strict=True) def matmul(m1, m2): "Multiply two matrices." 
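As a quick illustration of the *strict* flag documented above (a minimal sketch, assuming a Python 3.13+ interpreter where ``batched()`` accepts *strict*; the error message follows the pure-Python recipe shown earlier)::

    >>> from itertools import batched
    >>> list(batched('ABCDEFG', 3))
    [('A', 'B', 'C'), ('D', 'E', 'F'), ('G',)]
    >>> list(batched('ABCDEFG', 3, strict=True))
    Traceback (most recent call last):
      ...
    ValueError: batched(): incomplete batch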
@@ -1128,23 +1143,13 @@ The following recipes have a more mathematical flavor: if n > 1: yield n - def nth_combination(iterable, r, index): - "Equivalent to list(combinations(iterable, r))[index]" - pool = tuple(iterable) - n = len(pool) - c = math.comb(n, r) - if index < 0: - index += c - if index < 0 or index >= c: - raise IndexError - result = [] - while r: - c, n, r = c*r//n, n-1, r-1 - while index >= c: - index -= c - c, n = c*(n-r)//n, n-1 - result.append(pool[-1-n]) - return tuple(result) + def totient(n): + "Count of natural numbers up to n that are coprime to n." + # https://mathworld.wolfram.com/TotientFunction.html + # totient(12) --> 4 because len([1, 5, 7, 11]) == 4 + for p in unique_justseen(factor(n)): + n = n // p * (p - 1) + return n .. doctest:: @@ -1262,6 +1267,26 @@ The following recipes have a more mathematical flavor: >>> sum_of_squares([10, 20, 30]) 1400 + >>> list(reshape([(0, 1), (2, 3), (4, 5)], 3)) + [(0, 1, 2), (3, 4, 5)] + >>> M = [(0, 1, 2, 3), (4, 5, 6, 7), (8, 9, 10, 11)] + >>> list(reshape(M, 1)) + [(0,), (1,), (2,), (3,), (4,), (5,), (6,), (7,), (8,), (9,), (10,), (11,)] + >>> list(reshape(M, 2)) + [(0, 1), (2, 3), (4, 5), (6, 7), (8, 9), (10, 11)] + >>> list(reshape(M, 3)) + [(0, 1, 2), (3, 4, 5), (6, 7, 8), (9, 10, 11)] + >>> list(reshape(M, 4)) + [(0, 1, 2, 3), (4, 5, 6, 7), (8, 9, 10, 11)] + >>> list(reshape(M, 5)) + Traceback (most recent call last): + ... + ValueError: batched(): incomplete batch + >>> list(reshape(M, 6)) + [(0, 1, 2, 3, 4, 5), (6, 7, 8, 9, 10, 11)] + >>> list(reshape(M, 12)) + [(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)] + >>> list(transpose([(1, 2, 3), (11, 22, 33)])) [(1, 11), (2, 22), (3, 33)] @@ -1429,6 +1454,25 @@ The following recipes have a more mathematical flavor: >>> all(list(factor(n)) == sorted(factor(n)) for n in range(2_000)) True + >>> totient(0) # https://www.wolframalpha.com/input?i=totient+0 + 0 + >>> first_totients = [1, 1, 2, 2, 4, 2, 6, 4, 6, 4, 10, 4, 12, 6, 8, 8, 16, 6, + ... 18, 8, 12, 10, 22, 8, 20, 12, 18, 12, 28, 8, 30, 16, 20, 16, 24, 12, 36, 18, + ... 24, 16, 40, 12, 42, 20, 24, 22, 46, 16, 42, 20, 32, 24, 52, 18, 40, 24, 36, + ... 28, 58, 16, 60, 30, 36, 32, 48, 20, 66, 32, 44] # https://oeis.org/A000010 + ... + >>> list(map(totient, range(1, 70))) == first_totients + True + >>> reference_totient = lambda n: sum(math.gcd(t, n) == 1 for t in range(1, n+1)) + >>> all(totient(n) == reference_totient(n) for n in range(1000)) + True + >>> totient(128_884_753_939) == 128_884_753_938 # large prime + True + >>> totient(999953 * 999983) == 999952 * 999982 # large semiprime + True + >>> totient(6 ** 20) == 1 * 2**19 * 2 * 3**19 # repeated primes + True + >>> list(flatten([('a', 'b'), (), ('c', 'd', 'e'), ('f',), ('g', 'h', 'i')])) ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i'] @@ -1550,20 +1594,6 @@ The following recipes have a more mathematical flavor: >>> first_true('ABC0DEF1', '9', str.isdigit) '0' - >>> population = 'ABCDEFGH' - >>> for r in range(len(population) + 1): - ... seq = list(combinations(population, r)) - ... for i in range(len(seq)): - ... assert nth_combination(population, r, i) == seq[i] - ... for i in range(-len(seq), 0): - ... assert nth_combination(population, r, i) == seq[i] - - >>> iterable = 'abcde' - >>> r = 3 - >>> combos = list(combinations(iterable, r)) - >>> all(nth_combination(iterable, r, i) == comb for i, comb in enumerate(combos)) - True - .. 
testcode:: :hide: @@ -1590,6 +1620,24 @@ The following recipes have a more mathematical flavor: for (a, _), (b, c) in pairwise(pairwise(iterable)): yield a, b, c + def nth_combination(iterable, r, index): + "Equivalent to list(combinations(iterable, r))[index]" + pool = tuple(iterable) + n = len(pool) + c = math.comb(n, r) + if index < 0: + index += c + if index < 0 or index >= c: + raise IndexError + result = [] + while r: + c, n, r = c*r//n, n-1, r-1 + while index >= c: + index -= c + c, n = c*(n-r)//n, n-1 + result.append(pool[-1-n]) + return tuple(result) + .. doctest:: :hide: @@ -1605,3 +1653,17 @@ The following recipes have a more mathematical flavor: >>> list(triplewise('ABCDEFG')) [('A', 'B', 'C'), ('B', 'C', 'D'), ('C', 'D', 'E'), ('D', 'E', 'F'), ('E', 'F', 'G')] + + >>> population = 'ABCDEFGH' + >>> for r in range(len(population) + 1): + ... seq = list(combinations(population, r)) + ... for i in range(len(seq)): + ... assert nth_combination(population, r, i) == seq[i] + ... for i in range(-len(seq), 0): + ... assert nth_combination(population, r, i) == seq[i] + + >>> iterable = 'abcde' + >>> r = 3 + >>> combos = list(combinations(iterable, r)) + >>> all(nth_combination(iterable, r, i) == comb for i, comb in enumerate(combos)) + True diff --git a/Doc/library/mailbox.rst b/Doc/library/mailbox.rst index fd60d163378f07..c98496d1fff993 100644 --- a/Doc/library/mailbox.rst +++ b/Doc/library/mailbox.rst @@ -13,8 +13,8 @@ This module defines two classes, :class:`Mailbox` and :class:`Message`, for accessing and manipulating on-disk mailboxes and the messages they contain. -:class:`Mailbox` offers a dictionary-like mapping from keys to messages. -:class:`Message` extends the :mod:`email.message` module's +:class:`!Mailbox` offers a dictionary-like mapping from keys to messages. +:class:`!Message` extends the :mod:`email.message` module's :class:`~email.message.Message` class with format-specific state and behavior. Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. @@ -27,37 +27,38 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. .. _mailbox-objects: -:class:`Mailbox` objects ------------------------- +:class:`!Mailbox` objects +------------------------- .. class:: Mailbox A mailbox, which may be inspected and modified. - The :class:`Mailbox` class defines an interface and is not intended to be + The :class:`!Mailbox` class defines an interface and is not intended to be instantiated. Instead, format-specific subclasses should inherit from - :class:`Mailbox` and your code should instantiate a particular subclass. + :class:`!Mailbox` and your code should instantiate a particular subclass. - The :class:`Mailbox` interface is dictionary-like, with small keys - corresponding to messages. Keys are issued by the :class:`Mailbox` instance - with which they will be used and are only meaningful to that :class:`Mailbox` + The :class:`!Mailbox` interface is dictionary-like, with small keys + corresponding to messages. Keys are issued by the :class:`!Mailbox` instance + with which they will be used and are only meaningful to that :class:`!Mailbox` instance. A key continues to identify a message even if the corresponding message is modified, such as by replacing it with another message. - Messages may be added to a :class:`Mailbox` instance using the set-like + Messages may be added to a :class:`!Mailbox` instance using the set-like method :meth:`add` and removed using a ``del`` statement or the set-like methods :meth:`remove` and :meth:`discard`. 
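A minimal usage sketch of the dictionary-like :class:`!Mailbox` interface described above (the ``example.mbox`` path is hypothetical; mbox is a single-file format, so the mailbox is locked around the modification)::

    import mailbox

    mb = mailbox.mbox('example.mbox', create=True)
    mb.lock()                          # lock single-file formats before modifying
    try:
        key = mb.add('From: a@example.com\n\nhello\n')
        for k, msg in mb.iteritems():  # iterate over (key, message) pairs
            print(k, msg['From'])
        mb.discard(key)                # remove by key; no error if already absent
        mb.flush()                     # write pending changes to disk
    finally:
        mb.unlock()
        mb.close()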
- :class:`Mailbox` interface semantics differ from dictionary semantics in some + :class:`!Mailbox` interface semantics differ from dictionary semantics in some noteworthy ways. Each time a message is requested, a new representation (typically a :class:`Message` instance) is generated based upon the current state of the mailbox. Similarly, when a message is added to a - :class:`Mailbox` instance, the provided message representation's contents are + :class:`!Mailbox` instance, the provided message representation's contents are copied. In neither case is a reference to the message representation kept by - the :class:`Mailbox` instance. + the :class:`!Mailbox` instance. - The default :class:`Mailbox` iterator iterates over message representations, - not keys as the default dictionary iterator does. Moreover, modification of a + The default :class:`!Mailbox` :term:`iterator` iterates over message + representations, not keys as the default :class:`dictionary ` + iterator does. Moreover, modification of a mailbox during iteration is safe and well-defined. Messages added to the mailbox after an iterator is created will not be seen by the iterator. Messages removed from the mailbox before the iterator yields them @@ -69,14 +70,15 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. Be very cautious when modifying mailboxes that might be simultaneously changed by some other process. The safest mailbox format to use for such - tasks is Maildir; try to avoid using single-file formats such as mbox for + tasks is :class:`Maildir`; try to avoid using single-file formats such as + :class:`mbox` for concurrent writing. If you're modifying a mailbox, you *must* lock it by calling the :meth:`lock` and :meth:`unlock` methods *before* reading any messages in the file or making any changes by adding or deleting a message. Failing to lock the mailbox runs the risk of losing messages or corrupting the entire mailbox. - :class:`Mailbox` instances have the following methods: + :class:`!Mailbox` instances have the following methods: .. method:: add(message) @@ -127,21 +129,23 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. .. method:: iterkeys() - keys() - Return an iterator over all keys if called as :meth:`iterkeys` or return a - list of keys if called as :meth:`keys`. + Return an :term:`iterator` over all keys + + + .. method:: keys() + + The same as :meth:`iterkeys`, except that a :class:`list` is returned + rather than an :term:`iterator` .. method:: itervalues() __iter__() - values() - Return an iterator over representations of all messages if called as - :meth:`itervalues` or :meth:`__iter__` or return a list of such - representations if called as :meth:`values`. The messages are represented + Return an :term:`iterator` over representations of all messages. + The messages are represented as instances of the appropriate format-specific :class:`Message` subclass - unless a custom message factory was specified when the :class:`Mailbox` + unless a custom message factory was specified when the :class:`!Mailbox` instance was initialized. .. note:: @@ -150,15 +154,25 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. iterate over keys. + .. method:: values() + + The same as :meth:`itervalues`, except that a :class:`list` is returned + rather than an :term:`iterator` + + .. 
method:: iteritems() - items() - Return an iterator over (*key*, *message*) pairs, where *key* is a key and - *message* is a message representation, if called as :meth:`iteritems` or - return a list of such pairs if called as :meth:`items`. The messages are + Return an :term:`iterator` over (*key*, *message*) pairs, where *key* is + a key and *message* is a message representation. The messages are represented as instances of the appropriate format-specific :class:`Message` subclass unless a custom message factory was specified - when the :class:`Mailbox` instance was initialized. + when the :class:`!Mailbox` instance was initialized. + + + .. method:: items() + + The same as :meth:`iteritems`, except that a :class:`list` of pairs is + returned rather than an :term:`iterator` of pairs. .. method:: get(key, default=None) @@ -167,9 +181,9 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. Return a representation of the message corresponding to *key*. If no such message exists, *default* is returned if the method was called as :meth:`get` and a :exc:`KeyError` exception is raised if the method was - called as :meth:`~object.__getitem__`. The message is represented as an instance + called as :meth:`!__getitem__`. The message is represented as an instance of the appropriate format-specific :class:`Message` subclass unless a - custom message factory was specified when the :class:`Mailbox` instance + custom message factory was specified when the :class:`!Mailbox` instance was initialized. @@ -198,21 +212,23 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. .. method:: get_file(key) - Return a file-like representation of the message corresponding to *key*, + Return a :term:`file-like ` representation of the + message corresponding to *key*, or raise a :exc:`KeyError` exception if no such message exists. The file-like object behaves as if open in binary mode. This file should be closed once it is no longer needed. .. versionchanged:: 3.2 - The file object really is a binary file; previously it was incorrectly - returned in text mode. Also, the file-like object now supports the - context management protocol: you can use a :keyword:`with` statement to - automatically close it. + The file object really is a :term:`binary file`; previously it was + incorrectly returned in text mode. Also, the :term:`file-like object` + now supports the :term:`context manager` protocol: you can use a + :keyword:`with` statement to automatically close it. .. note:: - Unlike other representations of messages, file-like representations are - not necessarily independent of the :class:`Mailbox` instance that + Unlike other representations of messages, + :term:`file-like ` representations are not + necessarily independent of the :class:`!Mailbox` instance that created them or of the underlying mailbox. More specific documentation is provided by each subclass. @@ -238,7 +254,7 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. the message. If no such message exists, return *default*. The message is represented as an instance of the appropriate format-specific :class:`Message` subclass unless a custom message factory was specified - when the :class:`Mailbox` instance was initialized. + when the :class:`!Mailbox` instance was initialized. .. method:: popitem() @@ -248,7 +264,7 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. message. If the mailbox is empty, raise a :exc:`KeyError` exception. 
The message is represented as an instance of the appropriate format-specific :class:`Message` subclass unless a custom message factory was specified - when the :class:`Mailbox` instance was initialized. + when the :class:`!Mailbox` instance was initialized. .. method:: update(arg) @@ -259,7 +275,7 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. *message* as if by using :meth:`__setitem__`. As with :meth:`__setitem__`, each *key* must already correspond to a message in the mailbox or else a :exc:`KeyError` exception will be raised, so in general it is incorrect - for *arg* to be a :class:`Mailbox` instance. + for *arg* to be a :class:`!Mailbox` instance. .. note:: @@ -269,7 +285,7 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. .. method:: flush() Write any pending changes to the filesystem. For some :class:`Mailbox` - subclasses, changes are always written immediately and :meth:`flush` does + subclasses, changes are always written immediately and :meth:`!flush` does nothing, but you should still make a habit of calling this method. @@ -290,13 +306,13 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. .. method:: close() Flush the mailbox, unlock it if necessary, and close any open files. For - some :class:`Mailbox` subclasses, this method does nothing. + some :class:`!Mailbox` subclasses, this method does nothing. .. _mailbox-maildir: -:class:`Maildir` -^^^^^^^^^^^^^^^^ +:class:`!Maildir` objects +^^^^^^^^^^^^^^^^^^^^^^^^^ .. class:: Maildir(dirname, factory=None, create=True) @@ -330,11 +346,11 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. Folders of the style introduced by the Courier mail transfer agent are also supported. Any subdirectory of the main mailbox is considered a folder if ``'.'`` is the first character in its name. Folder names are represented by - :class:`Maildir` without the leading ``'.'``. Each folder is itself a Maildir + :class:`!Maildir` without the leading ``'.'``. Each folder is itself a Maildir mailbox but should not contain other folders. Instead, a logical nesting is indicated using ``'.'`` to delimit levels, e.g., "Archived.2005.07". - .. note:: + .. attribute:: Maildir.colon The Maildir specification requires the use of a colon (``':'``) in certain message file names. However, some operating systems do not permit this @@ -346,9 +362,9 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. import mailbox mailbox.Maildir.colon = '!' - The :attr:`colon` attribute may also be set on a per-instance basis. + The :attr:`!colon` attribute may also be set on a per-instance basis. - :class:`Maildir` instances have all of the methods of :class:`Mailbox` in + :class:`!Maildir` instances have all of the methods of :class:`Mailbox` in addition to the following: @@ -359,14 +375,14 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. .. method:: get_folder(folder) - Return a :class:`Maildir` instance representing the folder whose name is + Return a :class:`!Maildir` instance representing the folder whose name is *folder*. A :exc:`NoSuchMailboxError` exception is raised if the folder does not exist. .. method:: add_folder(folder) - Create a folder whose name is *folder* and return a :class:`Maildir` + Create a folder whose name is *folder* and return a :class:`!Maildir` instance representing it. @@ -485,7 +501,7 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. .. 
versionadded:: 3.13 - Some :class:`Mailbox` methods implemented by :class:`Maildir` deserve special + Some :class:`Mailbox` methods implemented by :class:`!Maildir` deserve special remarks: @@ -516,7 +532,7 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. .. method:: close() - :class:`Maildir` instances do not keep any open files and the underlying + :class:`!Maildir` instances do not keep any open files and the underlying mailboxes do not support locking, so this method does nothing. @@ -539,8 +555,8 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. .. _mailbox-mbox: -:class:`mbox` -^^^^^^^^^^^^^ +:class:`!mbox` objects +^^^^^^^^^^^^^^^^^^^^^^ .. class:: mbox(path, factory=None, create=True) @@ -557,22 +573,22 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. each message indicated by a line whose first five characters are "From ". Several variations of the mbox format exist to address perceived shortcomings in - the original. In the interest of compatibility, :class:`mbox` implements the + the original. In the interest of compatibility, :class:`!mbox` implements the original format, which is sometimes referred to as :dfn:`mboxo`. This means that the :mailheader:`Content-Length` header, if present, is ignored and that any occurrences of "From " at the beginning of a line in a message body are transformed to ">From " when storing the message, although occurrences of ">From " are not transformed to "From " when reading the message. - Some :class:`Mailbox` methods implemented by :class:`mbox` deserve special + Some :class:`Mailbox` methods implemented by :class:`!mbox` deserve special remarks: .. method:: get_file(key) - Using the file after calling :meth:`flush` or :meth:`close` on the - :class:`mbox` instance may yield unpredictable results or raise an - exception. + Using the file after calling :meth:`~Mailbox.flush` or + :meth:`~Mailbox.close` on the :class:`!mbox` instance may yield + unpredictable results or raise an exception. .. method:: lock() @@ -596,8 +612,8 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. .. _mailbox-mh: -:class:`MH` -^^^^^^^^^^^ +:class:`!MH` objects +^^^^^^^^^^^^^^^^^^^^ .. class:: MH(path, factory=None, create=True) @@ -617,12 +633,12 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. messages without moving them to sub-folders. Sequences are defined in a file called :file:`.mh_sequences` in each folder. - The :class:`MH` class manipulates MH mailboxes, but it does not attempt to + The :class:`!MH` class manipulates MH mailboxes, but it does not attempt to emulate all of :program:`mh`'s behaviors. In particular, it does not modify and is not affected by the :file:`context` or :file:`.mh_profile` files that are used by :program:`mh` to store its state and configuration. - :class:`MH` instances have all of the methods of :class:`Mailbox` in addition + :class:`!MH` instances have all of the methods of :class:`Mailbox` in addition to the following: @@ -633,14 +649,14 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. .. method:: get_folder(folder) - Return an :class:`MH` instance representing the folder whose name is + Return an :class:`!MH` instance representing the folder whose name is *folder*. A :exc:`NoSuchMailboxError` exception is raised if the folder does not exist. .. 
method:: add_folder(folder) - Create a folder whose name is *folder* and return an :class:`MH` instance + Create a folder whose name is *folder* and return an :class:`!MH` instance representing it. @@ -674,7 +690,7 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. Already-issued keys are invalidated by this operation and should not be subsequently used. - Some :class:`Mailbox` methods implemented by :class:`MH` deserve special + Some :class:`Mailbox` methods implemented by :class:`!MH` deserve special remarks: @@ -710,7 +726,7 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. .. method:: close() - :class:`MH` instances do not keep any open files, so this method is + :class:`!MH` instances do not keep any open files, so this method is equivalent to :meth:`unlock`. @@ -726,8 +742,8 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. .. _mailbox-babyl: -:class:`Babyl` -^^^^^^^^^^^^^^ +:class:`!Babyl` objects +^^^^^^^^^^^^^^^^^^^^^^^ .. class:: Babyl(path, factory=None, create=True) @@ -754,7 +770,7 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. message, and a list of all user-defined labels found in the mailbox is kept in the Babyl options section. - :class:`Babyl` instances have all of the methods of :class:`Mailbox` in + :class:`!Babyl` instances have all of the methods of :class:`Mailbox` in addition to the following: @@ -769,7 +785,7 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. options section, but the Babyl section is updated whenever the mailbox is modified. - Some :class:`Mailbox` methods implemented by :class:`Babyl` deserve special + Some :class:`Mailbox` methods implemented by :class:`!Babyl` deserve special remarks: @@ -802,8 +818,8 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. .. _mailbox-mmdf: -:class:`MMDF` -^^^^^^^^^^^^^ +:class:`!MMDF` objects +^^^^^^^^^^^^^^^^^^^^^^ .. class:: MMDF(path, factory=None, create=True) @@ -824,15 +840,15 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. ">From " when storing messages because the extra message separator lines prevent mistaking such occurrences for the starts of subsequent messages. - Some :class:`Mailbox` methods implemented by :class:`MMDF` deserve special + Some :class:`Mailbox` methods implemented by :class:`!MMDF` deserve special remarks: .. method:: get_file(key) - Using the file after calling :meth:`flush` or :meth:`close` on the - :class:`MMDF` instance may yield unpredictable results or raise an - exception. + Using the file after calling :meth:`~Mailbox.flush` or + :meth:`~Mailbox.close` on the :class:`!MMDF` instance may yield + unpredictable results or raise an exception. .. method:: lock() @@ -854,20 +870,20 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. .. _mailbox-message-objects: -:class:`Message` objects ------------------------- +:class:`!Message` objects +------------------------- .. class:: Message(message=None) A subclass of the :mod:`email.message` module's - :class:`~email.message.Message`. Subclasses of :class:`mailbox.Message` add + :class:`~email.message.Message`. Subclasses of :class:`!mailbox.Message` add mailbox-format-specific state and behavior. If *message* is omitted, the new instance is created in a default, empty state. If *message* is an :class:`email.message.Message` instance, its contents are copied; furthermore, any format-specific information is converted insofar as - possible if *message* is a :class:`Message` instance. 
If *message* is a string, + possible if *message* is a :class:`!Message` instance. If *message* is a string, a byte string, or a file, it should contain an :rfc:`2822`\ -compliant message, which is read and parsed. Files should be open in binary mode, but text mode files @@ -882,18 +898,18 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. such as whether a message has been read by the user or marked as important is retained, because it applies to the message itself. - There is no requirement that :class:`Message` instances be used to represent + There is no requirement that :class:`!Message` instances be used to represent messages retrieved using :class:`Mailbox` instances. In some situations, the - time and memory required to generate :class:`Message` representations might - not be acceptable. For such situations, :class:`Mailbox` instances also + time and memory required to generate :class:`!Message` representations might + not be acceptable. For such situations, :class:`!Mailbox` instances also offer string and file-like representations, and a custom message factory may - be specified when a :class:`Mailbox` instance is initialized. + be specified when a :class:`!Mailbox` instance is initialized. .. _mailbox-maildirmessage: -:class:`MaildirMessage` -^^^^^^^^^^^^^^^^^^^^^^^ +:class:`!MaildirMessage` objects +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. class:: MaildirMessage(message=None) @@ -928,7 +944,7 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. | T | Trashed | Marked for subsequent deletion | +------+---------+--------------------------------+ - :class:`MaildirMessage` instances offer the following methods: + :class:`!MaildirMessage` instances offer the following methods: .. method:: get_subdir() @@ -1005,7 +1021,7 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. Set "info" to *info*, which should be a string. -When a :class:`MaildirMessage` instance is created based upon an +When a :class:`!MaildirMessage` instance is created based upon an :class:`mboxMessage` or :class:`MMDFMessage` instance, the :mailheader:`Status` and :mailheader:`X-Status` headers are omitted and the following conversions take place: @@ -1025,7 +1041,7 @@ take place: | T flag | D flag | +--------------------+----------------------------------------------+ -When a :class:`MaildirMessage` instance is created based upon an +When a :class:`!MaildirMessage` instance is created based upon an :class:`MHMessage` instance, the following conversions take place: +-------------------------------+--------------------------+ @@ -1040,7 +1056,7 @@ When a :class:`MaildirMessage` instance is created based upon an | R flag | "replied" sequence | +-------------------------------+--------------------------+ -When a :class:`MaildirMessage` instance is created based upon a +When a :class:`!MaildirMessage` instance is created based upon a :class:`BabylMessage` instance, the following conversions take place: +-------------------------------+-------------------------------+ @@ -1060,8 +1076,8 @@ When a :class:`MaildirMessage` instance is created based upon a .. _mailbox-mboxmessage: -:class:`mboxMessage` -^^^^^^^^^^^^^^^^^^^^ +:class:`!mboxMessage` objects +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. class:: mboxMessage(message=None) @@ -1097,7 +1113,7 @@ When a :class:`MaildirMessage` instance is created based upon a "D", "F", and "A" flags are stored in the :mailheader:`X-Status` header. The flags and headers typically appear in the order mentioned. 
- :class:`mboxMessage` instances offer the following methods: + :class:`!mboxMessage` instances offer the following methods: .. method:: get_from() @@ -1145,7 +1161,7 @@ When a :class:`MaildirMessage` instance is created based upon a remove more than one flag at a time, *flag* maybe a string of more than one character. -When an :class:`mboxMessage` instance is created based upon a +When an :class:`!mboxMessage` instance is created based upon a :class:`MaildirMessage` instance, a "From " line is generated based upon the :class:`MaildirMessage` instance's delivery date, and the following conversions take place: @@ -1164,7 +1180,7 @@ take place: | A flag | R flag | +-----------------+-------------------------------+ -When an :class:`mboxMessage` instance is created based upon an +When an :class:`!mboxMessage` instance is created based upon an :class:`MHMessage` instance, the following conversions take place: +-------------------+--------------------------+ @@ -1179,7 +1195,7 @@ When an :class:`mboxMessage` instance is created based upon an | A flag | "replied" sequence | +-------------------+--------------------------+ -When an :class:`mboxMessage` instance is created based upon a +When an :class:`!mboxMessage` instance is created based upon a :class:`BabylMessage` instance, the following conversions take place: +-------------------+-----------------------------+ @@ -1194,7 +1210,8 @@ When an :class:`mboxMessage` instance is created based upon a | A flag | "answered" label | +-------------------+-----------------------------+ -When a :class:`Message` instance is created based upon an :class:`MMDFMessage` +When a :class:`!mboxMessage` instance is created based upon an +:class:`MMDFMessage` instance, the "From " line is copied and all flags directly correspond: +-----------------+----------------------------+ @@ -1214,8 +1231,8 @@ instance, the "From " line is copied and all flags directly correspond: .. _mailbox-mhmessage: -:class:`MHMessage` -^^^^^^^^^^^^^^^^^^ +:class:`!MHMessage` objects +^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. class:: MHMessage(message=None) @@ -1239,7 +1256,7 @@ instance, the "From " line is copied and all flags directly correspond: | flagged | Marked as important | +----------+------------------------------------------+ - :class:`MHMessage` instances offer the following methods: + :class:`!MHMessage` instances offer the following methods: .. method:: get_sequences() @@ -1261,7 +1278,7 @@ instance, the "From " line is copied and all flags directly correspond: Remove *sequence* from the list of sequences that include this message. 
-When an :class:`MHMessage` instance is created based upon a +When an :class:`!MHMessage` instance is created based upon a :class:`MaildirMessage` instance, the following conversions take place: +--------------------+-------------------------------+ @@ -1274,7 +1291,7 @@ When an :class:`MHMessage` instance is created based upon a | "flagged" sequence | F flag | +--------------------+-------------------------------+ -When an :class:`MHMessage` instance is created based upon an +When an :class:`!MHMessage` instance is created based upon an :class:`mboxMessage` or :class:`MMDFMessage` instance, the :mailheader:`Status` and :mailheader:`X-Status` headers are omitted and the following conversions take place: @@ -1290,7 +1307,7 @@ take place: | "flagged" sequence | F flag | +--------------------+----------------------------------------------+ -When an :class:`MHMessage` instance is created based upon a +When an :class:`!MHMessage` instance is created based upon a :class:`BabylMessage` instance, the following conversions take place: +--------------------+-----------------------------+ @@ -1304,8 +1321,8 @@ When an :class:`MHMessage` instance is created based upon a .. _mailbox-babylmessage: -:class:`BabylMessage` -^^^^^^^^^^^^^^^^^^^^^ +:class:`!BabylMessage` objects +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. class:: BabylMessage(message=None) @@ -1334,11 +1351,11 @@ When an :class:`MHMessage` instance is created based upon a | resent | Resent | +-----------+------------------------------------------+ - By default, Rmail displays only visible headers. The :class:`BabylMessage` + By default, Rmail displays only visible headers. The :class:`!BabylMessage` class, though, uses the original headers because they are more complete. Visible headers may be accessed explicitly if desired. - :class:`BabylMessage` instances offer the following methods: + :class:`!BabylMessage` instances offer the following methods: .. method:: get_labels() @@ -1377,7 +1394,7 @@ When an :class:`MHMessage` instance is created based upon a .. method:: update_visible() - When a :class:`BabylMessage` instance's original headers are modified, the + When a :class:`!BabylMessage` instance's original headers are modified, the visible headers are not automatically modified to correspond. This method updates the visible headers as follows: each visible header with a corresponding original header is set to the value of the original header, @@ -1387,7 +1404,7 @@ When an :class:`MHMessage` instance is created based upon a present in the original headers but not the visible headers are added to the visible headers. 
-When a :class:`BabylMessage` instance is created based upon a +When a :class:`!BabylMessage` instance is created based upon a :class:`MaildirMessage` instance, the following conversions take place: +-------------------+-------------------------------+ @@ -1402,7 +1419,7 @@ When a :class:`BabylMessage` instance is created based upon a | "forwarded" label | P flag | +-------------------+-------------------------------+ -When a :class:`BabylMessage` instance is created based upon an +When a :class:`!BabylMessage` instance is created based upon an :class:`mboxMessage` or :class:`MMDFMessage` instance, the :mailheader:`Status` and :mailheader:`X-Status` headers are omitted and the following conversions take place: @@ -1418,7 +1435,7 @@ take place: | "answered" label | A flag | +------------------+----------------------------------------------+ -When a :class:`BabylMessage` instance is created based upon an +When a :class:`!BabylMessage` instance is created based upon an :class:`MHMessage` instance, the following conversions take place: +------------------+--------------------------+ @@ -1432,8 +1449,8 @@ When a :class:`BabylMessage` instance is created based upon an .. _mailbox-mmdfmessage: -:class:`MMDFMessage` -^^^^^^^^^^^^^^^^^^^^ +:class:`!MMDFMessage` objects +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. class:: MMDFMessage(message=None) @@ -1467,7 +1484,7 @@ When a :class:`BabylMessage` instance is created based upon an "D", "F", and "A" flags are stored in the :mailheader:`X-Status` header. The flags and headers typically appear in the order mentioned. - :class:`MMDFMessage` instances offer the following methods, which are + :class:`!MMDFMessage` instances offer the following methods, which are identical to those offered by :class:`mboxMessage`: @@ -1516,7 +1533,7 @@ When a :class:`BabylMessage` instance is created based upon an remove more than one flag at a time, *flag* maybe a string of more than one character. 
-When an :class:`MMDFMessage` instance is created based upon a +When an :class:`!MMDFMessage` instance is created based upon a :class:`MaildirMessage` instance, a "From " line is generated based upon the :class:`MaildirMessage` instance's delivery date, and the following conversions take place: @@ -1535,7 +1552,7 @@ take place: | A flag | R flag | +-----------------+-------------------------------+ -When an :class:`MMDFMessage` instance is created based upon an +When an :class:`!MMDFMessage` instance is created based upon an :class:`MHMessage` instance, the following conversions take place: +-------------------+--------------------------+ @@ -1550,7 +1567,7 @@ When an :class:`MMDFMessage` instance is created based upon an | A flag | "replied" sequence | +-------------------+--------------------------+ -When an :class:`MMDFMessage` instance is created based upon a +When an :class:`!MMDFMessage` instance is created based upon a :class:`BabylMessage` instance, the following conversions take place: +-------------------+-----------------------------+ @@ -1565,7 +1582,7 @@ When an :class:`MMDFMessage` instance is created based upon a | A flag | "answered" label | +-------------------+-----------------------------+ -When an :class:`MMDFMessage` instance is created based upon an +When an :class:`!MMDFMessage` instance is created based upon an :class:`mboxMessage` instance, the "From " line is copied and all flags directly correspond: @@ -1587,7 +1604,7 @@ correspond: Exceptions ---------- -The following exception classes are defined in the :mod:`mailbox` module: +The following exception classes are defined in the :mod:`!mailbox` module: .. exception:: Error() diff --git a/Doc/library/numbers.rst b/Doc/library/numbers.rst index 2a05b56db051f9..17d1a275f04c9b 100644 --- a/Doc/library/numbers.rst +++ b/Doc/library/numbers.rst @@ -8,7 +8,7 @@ -------------- -The :mod:`numbers` module (:pep:`3141`) defines a hierarchy of numeric +The :mod:`!numbers` module (:pep:`3141`) defines a hierarchy of numeric :term:`abstract base classes ` which progressively define more operations. None of the types defined in this module are intended to be instantiated. @@ -45,7 +45,7 @@ The numeric tower .. class:: Real - To :class:`Complex`, :class:`Real` adds the operations that work on real + To :class:`Complex`, :class:`!Real` adds the operations that work on real numbers. In short, those are: a conversion to :class:`float`, :func:`math.trunc`, @@ -126,7 +126,8 @@ We want to implement the arithmetic operations so that mixed-mode operations either call an implementation whose author knew about the types of both arguments, or convert both to the nearest built in type and do the operation there. For subtypes of :class:`Integral`, this -means that :meth:`__add__` and :meth:`__radd__` should be defined as:: +means that :meth:`~object.__add__` and :meth:`~object.__radd__` should be +defined as:: class MyIntegral(Integral): @@ -160,15 +161,15 @@ refer to ``MyIntegral`` and ``OtherTypeIKnowAbout`` as of :class:`Complex` (``a : A <: Complex``), and ``b : B <: Complex``. I'll consider ``a + b``: -1. If ``A`` defines an :meth:`__add__` which accepts ``b``, all is +1. If ``A`` defines an :meth:`~object.__add__` which accepts ``b``, all is well. 2. 
If ``A`` falls back to the boilerplate code, and it were to - return a value from :meth:`__add__`, we'd miss the possibility - that ``B`` defines a more intelligent :meth:`__radd__`, so the + return a value from :meth:`~object.__add__`, we'd miss the possibility + that ``B`` defines a more intelligent :meth:`~object.__radd__`, so the boilerplate should return :const:`NotImplemented` from - :meth:`__add__`. (Or ``A`` may not implement :meth:`__add__` at + :meth:`!__add__`. (Or ``A`` may not implement :meth:`!__add__` at all.) -3. Then ``B``'s :meth:`__radd__` gets a chance. If it accepts +3. Then ``B``'s :meth:`~object.__radd__` gets a chance. If it accepts ``a``, all is well. 4. If it falls back to the boilerplate, there are no more possible methods to try, so this is where the default implementation @@ -180,7 +181,7 @@ Complex``. I'll consider ``a + b``: If ``A <: Complex`` and ``B <: Real`` without sharing any other knowledge, then the appropriate shared operation is the one involving the built -in :class:`complex`, and both :meth:`__radd__` s land there, so ``a+b +in :class:`complex`, and both :meth:`~object.__radd__` s land there, so ``a+b == b+a``. Because most of the operations on any given type will be very similar, diff --git a/Doc/library/os.rst b/Doc/library/os.rst index f4566a6684e14c..1138cc1f249ee7 100644 --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -4383,6 +4383,11 @@ written in Python, such as a mail server's external command delivery program. If you use TLS sockets in an application calling ``fork()``, see the warning in the :mod:`ssl` documentation. + .. warning:: + + On macOS the use of this function is unsafe when mixed with using + higher-level system APIs, and that includes using :mod:`urllib.request`. + .. versionchanged:: 3.8 Calling ``fork()`` in a subinterpreter is no longer supported (:exc:`RuntimeError` is raised). @@ -4422,6 +4427,11 @@ written in Python, such as a mail server's external command delivery program. .. audit-event:: os.forkpty "" os.forkpty + .. warning:: + + On macOS the use of this function is unsafe when mixed with using + higher-level system APIs, and that includes using :mod:`urllib.request`. + .. versionchanged:: 3.12 If Python is able to detect that your process has multiple threads, this now raises a :exc:`DeprecationWarning`. See the @@ -4560,7 +4570,8 @@ written in Python, such as a mail server's external command delivery program. Most users should use :func:`subprocess.run` instead of :func:`posix_spawn`. The positional-only arguments *path*, *args*, and *env* are similar to - :func:`execve`. + :func:`execve`. *env* is allowed to be ``None``, in which case current + process' environment is used. The *path* parameter is the path to the executable file. The *path* should contain a directory. Use :func:`posix_spawnp` to pass an executable file @@ -4590,10 +4601,17 @@ written in Python, such as a mail server's external command delivery program. Performs ``os.dup2(fd, new_fd)``. + .. data:: POSIX_SPAWN_CLOSEFROM + + (``os.POSIX_SPAWN_CLOSEFROM``, *fd*) + + Performs ``os.closerange(fd, INF)``. + These tuples correspond to the C library :c:func:`!posix_spawn_file_actions_addopen`, - :c:func:`!posix_spawn_file_actions_addclose`, and - :c:func:`!posix_spawn_file_actions_adddup2` API calls used to prepare + :c:func:`!posix_spawn_file_actions_addclose`, + :c:func:`!posix_spawn_file_actions_adddup2`, and + :c:func:`!posix_spawn_file_actions_addclosefrom_np` API calls used to prepare for the :c:func:`!posix_spawn` call itself. 
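A minimal sketch of :func:`os.posix_spawn` with *file_actions*, assuming a Unix platform;
``POSIX_SPAWN_CLOSEFROM`` appears only in a comment because it is new in 3.13 and exists only
where the C library provides :c:func:`!posix_spawn_file_actions_addclosefrom_np`::

    import os
    import sys

    # Redirect the child's stdout to a log file, then close the spare
    # descriptor in the child.  Passing ``os.environ`` keeps the parent's
    # environment (on 3.13+ the same effect can be had by passing ``None``).
    logfd = os.open("child.log", os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o644)
    file_actions = [
        (os.POSIX_SPAWN_DUP2, logfd, 1),
        (os.POSIX_SPAWN_CLOSE, logfd),
        # (os.POSIX_SPAWN_CLOSEFROM, 3),   # close fd 3 and above, where supported
    ]
    pid = os.posix_spawn(sys.executable,
                         [sys.executable, "-c", "print('hello from the child')"],
                         os.environ,
                         file_actions=file_actions)
    os.waitpid(pid, 0)
    os.close(logfd)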
The *setpgroup* argument will set the process group of the child to the value @@ -4635,6 +4653,13 @@ written in Python, such as a mail server's external command delivery program. .. versionadded:: 3.8 + .. versionchanged:: 3.13 + *env* parameter accepts ``None``. + + .. versionchanged:: 3.13 + ``os.POSIX_SPAWN_CLOSEFROM`` is available on platforms where + :c:func:`!posix_spawn_file_actions_addclosefrom_np` exists. + .. availability:: Unix, not Emscripten, not WASI. .. function:: posix_spawnp(path, argv, env, *, file_actions=None, \ diff --git a/Doc/library/pty.rst b/Doc/library/pty.rst index af9378464edb9f..bd2f5ed45cb8b4 100644 --- a/Doc/library/pty.rst +++ b/Doc/library/pty.rst @@ -33,6 +33,9 @@ The :mod:`pty` module defines the following functions: file descriptor connected to the child's controlling terminal (and also to the child's standard input and output). + .. warning:: On macOS the use of this function is unsafe when mixed with using + higher-level system APIs, and that includes using :mod:`urllib.request`. + .. function:: openpty() diff --git a/Doc/library/readline.rst b/Doc/library/readline.rst index 2e0f45ced30b9c..1adafcaa02eab9 100644 --- a/Doc/library/readline.rst +++ b/Doc/library/readline.rst @@ -218,6 +218,8 @@ Startup hooks if Python was compiled for a version of the library that supports it. +.. _readline-completion: + Completion ---------- diff --git a/Doc/library/rlcompleter.rst b/Doc/library/rlcompleter.rst index 40b09ce897880e..8287699c5f013e 100644 --- a/Doc/library/rlcompleter.rst +++ b/Doc/library/rlcompleter.rst @@ -10,12 +10,14 @@ -------------- -The :mod:`rlcompleter` module defines a completion function suitable for the -:mod:`readline` module by completing valid Python identifiers and keywords. +The :mod:`!rlcompleter` module defines a completion function suitable to be +passed to :func:`~readline.set_completer` in the :mod:`readline` module. When this module is imported on a Unix platform with the :mod:`readline` module available, an instance of the :class:`Completer` class is automatically created -and its :meth:`complete` method is set as the :mod:`readline` completer. +and its :meth:`~Completer.complete` method is set as the +:ref:`readline completer `. The method provides +completion of valid Python :ref:`identifiers and keywords `. Example:: @@ -28,7 +30,7 @@ Example:: readline.__name__ readline.parse_and_bind( >>> readline. -The :mod:`rlcompleter` module is designed for use with Python's +The :mod:`!rlcompleter` module is designed for use with Python's :ref:`interactive mode `. Unless Python is run with the :option:`-S` option, the module is automatically imported and configured (see :ref:`rlcompleter-config`). @@ -39,23 +41,25 @@ this module can still be used for custom purposes. .. _completer-objects: -Completer Objects ------------------ +.. class:: Completer -Completer objects have the following method: + Completer objects have the following method: + .. method:: Completer.complete(text, state) -.. method:: Completer.complete(text, state) + Return the next possible completion for *text*. - Return the *state*\ th completion for *text*. + When called by the :mod:`readline` module, this method is called + successively with ``state == 0, 1, 2, ...`` until the method returns + ``None``. - If called for *text* that doesn't include a period character (``'.'``), it will - complete from names currently defined in :mod:`__main__`, :mod:`builtins` and - keywords (as defined by the :mod:`keyword` module). 
- - If called for a dotted name, it will try to evaluate anything without obvious - side-effects (functions will not be evaluated, but it can generate calls to - :meth:`__getattr__`) up to the last part, and find matches for the rest via the - :func:`dir` function. Any exception raised during the evaluation of the - expression is caught, silenced and :const:`None` is returned. + If called for *text* that doesn't include a period character (``'.'``), it will + complete from names currently defined in :mod:`__main__`, :mod:`builtins` and + keywords (as defined by the :mod:`keyword` module). + If called for a dotted name, it will try to evaluate anything without obvious + side-effects (functions will not be evaluated, but it can generate calls to + :meth:`~object.__getattr__`) up to the last part, and find matches for the + rest via the :func:`dir` function. Any exception raised during the + evaluation of the expression is caught, silenced and :const:`None` is + returned. diff --git a/Doc/library/tarfile.rst b/Doc/library/tarfile.rst index 3e5723a66780ca..f4e83d64bb1580 100644 --- a/Doc/library/tarfile.rst +++ b/Doc/library/tarfile.rst @@ -116,7 +116,7 @@ Some facts and figures: ``'filemode|[compression]'``. :func:`tarfile.open` will return a :class:`TarFile` object that processes its data as a stream of blocks. No random seeking will be done on the file. If given, *fileobj* may be any object that has a - :meth:`read` or :meth:`write` method (depending on the *mode*). *bufsize* + :meth:`~io.TextIOBase.read` or :meth:`~io.TextIOBase.write` method (depending on the *mode*). *bufsize* specifies the blocksize and defaults to ``20 * 512`` bytes. Use this variant in combination with e.g. ``sys.stdin``, a socket :term:`file object` or a tape device. However, such a :class:`TarFile` object is limited in that it does @@ -255,6 +255,51 @@ The following constants are available at the module level: The default character encoding: ``'utf-8'`` on Windows, the value returned by :func:`sys.getfilesystemencoding` otherwise. +.. data:: REGTYPE + AREGTYPE + + A regular file :attr:`~TarInfo.type`. + +.. data:: LNKTYPE + + A link (inside tarfile) :attr:`~TarInfo.type`. + +.. data:: SYMTYPE + + A symbolic link :attr:`~TarInfo.type`. + +.. data:: CHRTYPE + + A character special device :attr:`~TarInfo.type`. + +.. data:: BLKTYPE + + A block special device :attr:`~TarInfo.type`. + +.. data:: DIRTYPE + + A directory :attr:`~TarInfo.type`. + +.. data:: FIFOTYPE + + A FIFO special device :attr:`~TarInfo.type`. + +.. data:: CONTTYPE + + A contiguous file :attr:`~TarInfo.type`. + +.. data:: GNUTYPE_LONGNAME + + A GNU tar longname :attr:`~TarInfo.type`. + +.. data:: GNUTYPE_LONGLINK + + A GNU tar longlink :attr:`~TarInfo.type`. + +.. data:: GNUTYPE_SPARSE + + A GNU tar sparse file :attr:`~TarInfo.type`. + Each of the following constants defines a tar archive format that the :mod:`tarfile` module is able to create. See section :ref:`tar-formats` for @@ -325,7 +370,7 @@ be finalized; only the internally used file object will be closed. See the *name* is the pathname of the archive. *name* may be a :term:`path-like object`. It can be omitted if *fileobj* is given. - In this case, the file object's :attr:`name` attribute is used if it exists. + In this case, the file object's :attr:`!name` attribute is used if it exists. 
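A minimal, self-contained sketch of the *fileobj* usage described above, writing and
re-reading an in-memory archive (the member name ``hello.txt`` is arbitrary)::

    import io
    import tarfile

    buf = io.BytesIO()
    with tarfile.open(fileobj=buf, mode="w") as tar:
        data = b"hello world\n"
        info = tarfile.TarInfo(name="hello.txt")
        info.size = len(data)
        tar.addfile(info, io.BytesIO(data))

    buf.seek(0)
    with tarfile.open(fileobj=buf, mode="r") as tar:
        print(tar.getnames())                    # ['hello.txt']
        member = tar.getmember("hello.txt")
        print(member.type == tarfile.REGTYPE)    # True (see the type constants above)
        print(tar.name)                          # None: a BytesIO has no ``name`` attribute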
*mode* is either ``'r'`` to read from an existing archive, ``'a'`` to append data to an existing file, ``'w'`` to create a new file overwriting an existing @@ -359,7 +404,7 @@ be finalized; only the internally used file object will be closed. See the messages). The messages are written to ``sys.stderr``. *errorlevel* controls how extraction errors are handled, - see :attr:`the corresponding attribute <~TarFile.errorlevel>`. + see :attr:`the corresponding attribute `. The *encoding* and *errors* arguments define the character encoding to be used for reading or writing the archive and how conversion errors are going @@ -645,8 +690,8 @@ It does *not* contain the file's data itself. :meth:`~TarFile.getmember`, :meth:`~TarFile.getmembers` and :meth:`~TarFile.gettarinfo`. -Modifying the objects returned by :meth:`~!TarFile.getmember` or -:meth:`~!TarFile.getmembers` will affect all subsequent +Modifying the objects returned by :meth:`~TarFile.getmember` or +:meth:`~TarFile.getmembers` will affect all subsequent operations on the archive. For cases where this is unwanted, you can use :mod:`copy.copy() ` or call the :meth:`~TarInfo.replace` method to create a modified copy in one step. @@ -795,8 +840,8 @@ A ``TarInfo`` object has the following public data attributes: A dictionary containing key-value pairs of an associated pax extended header. -.. method:: TarInfo.replace(name=..., mtime=..., mode=..., linkname=..., - uid=..., gid=..., uname=..., gname=..., +.. method:: TarInfo.replace(name=..., mtime=..., mode=..., linkname=..., \ + uid=..., gid=..., uname=..., gname=..., \ deep=True) .. versionadded:: 3.12 @@ -816,7 +861,7 @@ A :class:`TarInfo` object also provides some convenient query methods: .. method:: TarInfo.isfile() - Return :const:`True` if the :class:`Tarinfo` object is a regular file. + Return :const:`True` if the :class:`TarInfo` object is a regular file. .. method:: TarInfo.isreg() @@ -952,7 +997,7 @@ reused in custom filters: path (after following symlinks) would end up outside the destination. This raises :class:`~tarfile.OutsideDestinationError`. - Clear high mode bits (setuid, setgid, sticky) and group/other write bits - (:const:`~stat.S_IWGRP`|:const:`~stat.S_IWOTH`). + (:const:`~stat.S_IWGRP` | :const:`~stat.S_IWOTH`). Return the modified ``TarInfo`` member. @@ -977,9 +1022,9 @@ reused in custom filters: - For regular files, including hard links: - Set the owner read and write permissions - (:const:`~stat.S_IRUSR`|:const:`~stat.S_IWUSR`). + (:const:`~stat.S_IRUSR` | :const:`~stat.S_IWUSR`). - Remove the group & other executable permission - (:const:`~stat.S_IXGRP`|:const:`~stat.S_IXOTH`) + (:const:`~stat.S_IXGRP` | :const:`~stat.S_IXOTH`) if the owner doesn’t have it (:const:`~stat.S_IXUSR`). - For other files (directories), set ``mode`` to ``None``, so diff --git a/Doc/library/test.rst b/Doc/library/test.rst index 6cbdc39b3c024d..9173db07fd0071 100644 --- a/Doc/library/test.rst +++ b/Doc/library/test.rst @@ -1408,7 +1408,8 @@ The :mod:`test.support.os_helper` module provides support for os tests. .. class:: FakePath(path) - Simple :term:`path-like object`. It implements the :meth:`__fspath__` + Simple :term:`path-like object`. It implements the + :meth:`~os.PathLike.__fspath__` method which just returns the *path* argument. If *path* is an exception, it will be raised in :meth:`!__fspath__`. 
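The behaviour is easy to emulate; a hypothetical stand-in (not the actual :class:`!FakePath`
implementation) might look like this::

    import os

    class _FakePath:
        """Return *path* from __fspath__, or raise it if it is an exception."""

        def __init__(self, path):
            self.path = path

        def __fspath__(self):
            if isinstance(self.path, BaseException):
                raise self.path
            return self.path

    print(os.fspath(_FakePath("/tmp/data.txt")))     # '/tmp/data.txt'
    try:
        os.fspath(_FakePath(ZeroDivisionError("boom")))
    except ZeroDivisionError as exc:
        print("raised:", exc)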
diff --git a/Doc/library/tkinter.ttk.rst b/Doc/library/tkinter.ttk.rst index 6e01ec7b291255..1609dc2ce9218e 100644 --- a/Doc/library/tkinter.ttk.rst +++ b/Doc/library/tkinter.ttk.rst @@ -986,19 +986,19 @@ ttk.Treeview The valid options/values are: - id + *id* Returns the column name. This is a read-only option. - anchor: One of the standard Tk anchor values. + *anchor*: One of the standard Tk anchor values. Specifies how the text in this column should be aligned with respect to the cell. - minwidth: width + *minwidth*: width The minimum width of the column in pixels. The treeview widget will not make the column any smaller than specified by this option when the widget is resized or the user drags a column. - stretch: ``True``/``False`` + *stretch*: ``True``/``False`` Specifies whether the column's width should be adjusted when the widget is resized. - width: width + *width*: width The width of the column in pixels. To configure the tree column, call this with column = "#0" @@ -1041,14 +1041,14 @@ ttk.Treeview The valid options/values are: - text: text + *text*: text The text to display in the column heading. - image: imageName + *image*: imageName Specifies an image to display to the right of the column heading. - anchor: anchor + *anchor*: anchor Specifies how the heading text should be aligned. One of the standard Tk anchor values. - command: callback + *command*: callback A callback to be invoked when the heading label is pressed. To configure the tree column heading, call this with column = "#0". @@ -1573,23 +1573,24 @@ Layouts A layout can be just ``None``, if it takes no options, or a dict of options specifying how to arrange the element. The layout mechanism uses a simplified version of the pack geometry manager: given an -initial cavity, each element is allocated a parcel. Valid -options/values are: +initial cavity, each element is allocated a parcel. -side: whichside +The valid options/values are: + +*side*: whichside Specifies which side of the cavity to place the element; one of top, right, bottom or left. If omitted, the element occupies the entire cavity. -sticky: nswe +*sticky*: nswe Specifies where the element is placed inside its allocated parcel. -unit: 0 or 1 +*unit*: 0 or 1 If set to 1, causes the element and all of its descendants to be treated as a single element for the purposes of :meth:`Widget.identify` et al. It's used for things like scrollbar thumbs with grips. -children: [sublayout... ] +*children*: [sublayout... ] Specifies a list of elements to place inside the element. Each element is a tuple (or other sequence type) where the first item is the layout name, and the other is a `Layout`_. 
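A minimal sketch of inspecting and installing a layout with
:meth:`Style.layout <tkinter.ttk.Style.layout>`; it needs a running display, and the element
names shown are theme-dependent assumptions rather than guaranteed values::

    import tkinter as tk
    from tkinter import ttk

    root = tk.Tk()
    style = ttk.Style(root)

    # The current layout: a list of (element name, options) pairs using the
    # *side*/*sticky*/*unit*/*children* keys described above.
    print(style.layout("TButton"))

    # Install a simplified scrollbar layout: a trough containing only a thumb.
    style.layout("Plain.Horizontal.TScrollbar",
                 [("Horizontal.Scrollbar.trough",
                   {"sticky": "we",
                    "children": [("Horizontal.Scrollbar.thumb",
                                  {"unit": 1, "sticky": "nswe"})]})])
    root.destroy()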
diff --git a/Doc/library/tomllib.rst b/Doc/library/tomllib.rst index 918576eb37eaee..f9e2dfeb13dc87 100644 --- a/Doc/library/tomllib.rst +++ b/Doc/library/tomllib.rst @@ -95,7 +95,7 @@ Conversion Table +------------------+--------------------------------------------------------------------------------------+ | TOML | Python | +==================+======================================================================================+ -| table | dict | +| TOML document | dict | +------------------+--------------------------------------------------------------------------------------+ | string | str | +------------------+--------------------------------------------------------------------------------------+ @@ -115,3 +115,9 @@ Conversion Table +------------------+--------------------------------------------------------------------------------------+ | array | list | +------------------+--------------------------------------------------------------------------------------+ +| table | dict | ++------------------+--------------------------------------------------------------------------------------+ +| inline table | dict | ++------------------+--------------------------------------------------------------------------------------+ +| array of tables | list of dicts | ++------------------+--------------------------------------------------------------------------------------+ diff --git a/Doc/library/unittest.mock.rst b/Doc/library/unittest.mock.rst index d6ac4d09300d9f..f1cc482c5cfe2a 100644 --- a/Doc/library/unittest.mock.rst +++ b/Doc/library/unittest.mock.rst @@ -824,8 +824,9 @@ apply to method calls on the mock object. .. class:: PropertyMock(*args, **kwargs) - A mock intended to be used as a property, or other descriptor, on a class. - :class:`PropertyMock` provides :meth:`__get__` and :meth:`__set__` methods + A mock intended to be used as a :class:`property`, or other + :term:`descriptor`, on a class. :class:`PropertyMock` provides + :meth:`~object.__get__` and :meth:`~object.__set__` methods so you can specify a return value when it is fetched. Fetching a :class:`PropertyMock` instance from an object calls the mock, with @@ -1707,8 +1708,9 @@ Keywords can be used in the :func:`patch.dict` call to set values in the diction :func:`patch.dict` can be used with dictionary like objects that aren't actually dictionaries. At the very minimum they must support item getting, setting, deleting and either iteration or membership test. This corresponds to the -magic methods :meth:`~object.__getitem__`, :meth:`__setitem__`, :meth:`__delitem__` and either -:meth:`__iter__` or :meth:`__contains__`. +magic methods :meth:`~object.__getitem__`, :meth:`~object.__setitem__`, +:meth:`~object.__delitem__` and either :meth:`~container.__iter__` or +:meth:`~object.__contains__`. >>> class Container: ... def __init__(self): @@ -2171,7 +2173,7 @@ For example: >>> object() in mock False -The two equality methods, :meth:`__eq__` and :meth:`__ne__`, are special. +The two equality methods, :meth:`!__eq__` and :meth:`!__ne__`, are special. They do the default equality comparison on identity, using the :attr:`~Mock.side_effect` attribute, unless you change their return value to return something else:: @@ -2521,8 +2523,8 @@ mock_open *read_data* is now reset on each call to the *mock*. .. versionchanged:: 3.8 - Added :meth:`__iter__` to implementation so that iteration (such as in for - loops) correctly consumes *read_data*. 
+ Added :meth:`~container.__iter__` to implementation so that iteration + (such as in for loops) correctly consumes *read_data*. Using :func:`open` as a context manager is a great way to ensure your file handles are closed properly and is becoming common:: @@ -2704,7 +2706,7 @@ able to use autospec. On the other hand it is much better to design your objects so that introspection is safe [#]_. A more serious problem is that it is common for instance attributes to be -created in the :meth:`__init__` method and not to exist on the class at all. +created in the :meth:`~object.__init__` method and not to exist on the class at all. *autospec* can't know about any dynamically created attributes and restricts the api to visible attributes. :: @@ -2745,8 +2747,9 @@ this particular scenario: AttributeError: Mock object has no attribute 'a' Probably the best way of solving the problem is to add class attributes as -default values for instance members initialised in :meth:`__init__`. Note that if -you are only setting default attributes in :meth:`__init__` then providing them via +default values for instance members initialised in :meth:`~object.__init__`. +Note that if +you are only setting default attributes in :meth:`!__init__` then providing them via class attributes (shared between instances of course) is faster too. e.g. .. code-block:: python diff --git a/Doc/library/unittest.rst b/Doc/library/unittest.rst index c04e6c378cc3b1..70b4c84c05f818 100644 --- a/Doc/library/unittest.rst +++ b/Doc/library/unittest.rst @@ -346,8 +346,8 @@ the `load_tests protocol`_. ``python -m unittest discover -s root/namespace -t root``). .. versionchanged:: 3.11 - Python 3.11 dropped the :term:`namespace packages ` - support. It has been broken since Python 3.7. Start directory and + :mod:`unittest` dropped the :term:`namespace packages ` + support in Python 3.11. It has been broken since Python 3.7. Start directory and subdirectories containing tests must be regular package that have ``__init__.py`` file. @@ -1733,7 +1733,7 @@ Grouping tests .. method:: __iter__() Tests grouped by a :class:`TestSuite` are always accessed by iteration. - Subclasses can lazily provide tests by overriding :meth:`__iter__`. Note + Subclasses can lazily provide tests by overriding :meth:`!__iter__`. Note that this method may be called several times on a single suite (for example when counting tests or comparing for equality) so the tests returned by repeated iterations before :meth:`TestSuite.run` must be the @@ -1744,7 +1744,7 @@ Grouping tests .. versionchanged:: 3.2 In earlier versions the :class:`TestSuite` accessed tests directly rather - than through iteration, so overriding :meth:`__iter__` wasn't sufficient + than through iteration, so overriding :meth:`!__iter__` wasn't sufficient for providing tests. .. versionchanged:: 3.4 diff --git a/Doc/library/urllib.request.rst b/Doc/library/urllib.request.rst index 040e28e9124014..0e18db73280a63 100644 --- a/Doc/library/urllib.request.rst +++ b/Doc/library/urllib.request.rst @@ -21,6 +21,14 @@ authentication, redirections, cookies and more. The `Requests package `_ is recommended for a higher-level HTTP client interface. +.. warning:: + + On macOS it is unsafe to use this module in programs using + :func:`os.fork` because the :func:`getproxies` implementation for + macOS uses a higher-level system API. Set the environment variable + ``no_proxy`` to ``*`` to avoid this problem + (e.g. ``os.environ["no_proxy"] = "*"``). + .. 
include:: ../includes/wasm-notavail.rst The :mod:`urllib.request` module defines the following functions: diff --git a/Doc/library/wsgiref.rst b/Doc/library/wsgiref.rst index be9e56b04c1fbf..c2b0ba7046967e 100644 --- a/Doc/library/wsgiref.rst +++ b/Doc/library/wsgiref.rst @@ -201,8 +201,9 @@ manipulation of WSGI response headers using a mapping-like interface. an empty list. :class:`Headers` objects support typical mapping operations including - :meth:`~object.__getitem__`, :meth:`get`, :meth:`__setitem__`, :meth:`setdefault`, - :meth:`__delitem__` and :meth:`__contains__`. For each of + :meth:`~object.__getitem__`, :meth:`~dict.get`, :meth:`~object.__setitem__`, + :meth:`~dict.setdefault`, + :meth:`~object.__delitem__` and :meth:`~object.__contains__`. For each of these methods, the key is the header name (treated case-insensitively), and the value is the first value associated with that header name. Setting a header deletes any existing values for that header, then adds a new value at the end of @@ -520,8 +521,10 @@ input, output, and error streams. want to subclass this instead of :class:`BaseCGIHandler`. This class is a subclass of :class:`BaseHandler`. It overrides the - :meth:`__init__`, :meth:`get_stdin`, :meth:`get_stderr`, :meth:`add_cgi_vars`, - :meth:`_write`, and :meth:`_flush` methods to support explicitly setting the + :meth:`!__init__`, :meth:`~BaseHandler.get_stdin`, + :meth:`~BaseHandler.get_stderr`, :meth:`~BaseHandler.add_cgi_vars`, + :meth:`~BaseHandler._write`, and :meth:`~BaseHandler._flush` methods to + support explicitly setting the environment and streams via the constructor. The supplied environment and streams are stored in the :attr:`stdin`, :attr:`stdout`, :attr:`stderr`, and :attr:`environ` attributes. diff --git a/Doc/library/xmlrpc.client.rst b/Doc/library/xmlrpc.client.rst index 146c4fd768233b..f7f23007fb0522 100644 --- a/Doc/library/xmlrpc.client.rst +++ b/Doc/library/xmlrpc.client.rst @@ -269,8 +269,9 @@ DateTime Objects Write the XML-RPC encoding of this :class:`DateTime` item to the *out* stream object. - It also supports certain of Python's built-in operators through rich comparison - and :meth:`__repr__` methods. + It also supports certain of Python's built-in operators through + :meth:`rich comparison ` and :meth:`~object.__repr__` + methods. A working example follows. The server code:: @@ -334,8 +335,8 @@ Binary Objects which was the de facto standard base64 specification when the XML-RPC spec was written. - It also supports certain of Python's built-in operators through :meth:`__eq__` - and :meth:`__ne__` methods. + It also supports certain of Python's built-in operators through + :meth:`~object.__eq__` and :meth:`~object.__ne__` methods. Example usage of the binary objects. We're going to transfer an image over XMLRPC:: diff --git a/Doc/reference/grammar.rst b/Doc/reference/grammar.rst index bc1db7b039cd5a..b9cca4444c9141 100644 --- a/Doc/reference/grammar.rst +++ b/Doc/reference/grammar.rst @@ -1,3 +1,5 @@ +.. 
_full-grammar-specification: + Full Grammar specification ========================== diff --git a/Doc/tools/.nitignore b/Doc/tools/.nitignore index 4d1d31d44fcf75..8b0fc13832d51a 100644 --- a/Doc/tools/.nitignore +++ b/Doc/tools/.nitignore @@ -35,7 +35,6 @@ Doc/library/bdb.rst Doc/library/bisect.rst Doc/library/calendar.rst Doc/library/cmd.rst -Doc/library/collections.abc.rst Doc/library/collections.rst Doc/library/concurrent.futures.rst Doc/library/configparser.rst @@ -63,11 +62,9 @@ Doc/library/locale.rst Doc/library/logging.config.rst Doc/library/logging.handlers.rst Doc/library/lzma.rst -Doc/library/mailbox.rst Doc/library/mmap.rst Doc/library/multiprocessing.rst Doc/library/multiprocessing.shared_memory.rst -Doc/library/numbers.rst Doc/library/optparse.rst Doc/library/os.rst Doc/library/pickle.rst @@ -81,7 +78,6 @@ Doc/library/pyexpat.rst Doc/library/random.rst Doc/library/readline.rst Doc/library/resource.rst -Doc/library/rlcompleter.rst Doc/library/select.rst Doc/library/signal.rst Doc/library/smtplib.rst @@ -90,7 +86,6 @@ Doc/library/ssl.rst Doc/library/stdtypes.rst Doc/library/string.rst Doc/library/subprocess.rst -Doc/library/tarfile.rst Doc/library/termios.rst Doc/library/test.rst Doc/library/tkinter.rst @@ -119,7 +114,6 @@ Doc/using/windows.rst Doc/whatsnew/2.0.rst Doc/whatsnew/2.1.rst Doc/whatsnew/2.2.rst -Doc/whatsnew/2.3.rst Doc/whatsnew/2.4.rst Doc/whatsnew/2.5.rst Doc/whatsnew/2.6.rst diff --git a/Doc/tutorial/controlflow.rst b/Doc/tutorial/controlflow.rst index aa9caa101da40a..77444f9cb8358d 100644 --- a/Doc/tutorial/controlflow.rst +++ b/Doc/tutorial/controlflow.rst @@ -559,10 +559,10 @@ defined to allow. For example:: def ask_ok(prompt, retries=4, reminder='Please try again!'): while True: - ok = input(prompt) - if ok in ('y', 'ye', 'yes'): + reply = input(prompt) + if reply in {'y', 'ye', 'yes'}: return True - if ok in ('n', 'no', 'nop', 'nope'): + if reply in {'n', 'no', 'nop', 'nope'}: return False retries = retries - 1 if retries < 0: diff --git a/Doc/tutorial/modules.rst b/Doc/tutorial/modules.rst index bf9e8e0b7b8066..0316239e776a95 100644 --- a/Doc/tutorial/modules.rst +++ b/Doc/tutorial/modules.rst @@ -437,7 +437,8 @@ When importing the package, Python searches through the directories on ``sys.path`` looking for the package subdirectory. The :file:`__init__.py` files are required to make Python treat directories -containing the file as packages. This prevents directories with a common name, +containing the file as packages (unless using a :term:`namespace package`, a +relatively advanced feature). This prevents directories with a common name, such as ``string``, from unintentionally hiding valid modules that occur later on the module search path. In the simplest case, :file:`__init__.py` can just be an empty file, but it can also execute initialization code for the package or diff --git a/Doc/whatsnew/2.3.rst b/Doc/whatsnew/2.3.rst index 0c77b339a182c9..8ebcbfaf248551 100644 --- a/Doc/whatsnew/2.3.rst +++ b/Doc/whatsnew/2.3.rst @@ -40,10 +40,10 @@ new feature. PEP 218: A Standard Set Datatype ================================ -The new :mod:`sets` module contains an implementation of a set datatype. The +The new :mod:`!sets` module contains an implementation of a set datatype. The :class:`Set` class is for mutable sets, sets that can have members added and -removed. The :class:`ImmutableSet` class is for sets that can't be modified, -and instances of :class:`ImmutableSet` can therefore be used as dictionary keys. +removed. 
The :class:`!ImmutableSet` class is for sets that can't be modified, +and instances of :class:`!ImmutableSet` can therefore be used as dictionary keys. Sets are built on top of dictionaries, so the elements within a set must be hashable. @@ -63,10 +63,10 @@ Here's a simple example:: Set([1, 2, 5]) >>> -The union and intersection of sets can be computed with the :meth:`union` and -:meth:`intersection` methods; an alternative notation uses the bitwise operators +The union and intersection of sets can be computed with the :meth:`~frozenset.union` and +:meth:`~frozenset.intersection` methods; an alternative notation uses the bitwise operators ``&`` and ``|``. Mutable sets also have in-place versions of these methods, -:meth:`union_update` and :meth:`intersection_update`. :: +:meth:`!union_update` and :meth:`~frozenset.intersection_update`. :: >>> S1 = sets.Set([1,2,3]) >>> S2 = sets.Set([4,5,6]) @@ -87,7 +87,7 @@ It's also possible to take the symmetric difference of two sets. This is the set of all elements in the union that aren't in the intersection. Another way of putting it is that the symmetric difference contains all elements that are in exactly one set. Again, there's an alternative notation (``^``), and an -in-place version with the ungainly name :meth:`symmetric_difference_update`. :: +in-place version with the ungainly name :meth:`~frozenset.symmetric_difference_update`. :: >>> S1 = sets.Set([1,2,3,4]) >>> S2 = sets.Set([3,4,5,6]) @@ -97,7 +97,7 @@ in-place version with the ungainly name :meth:`symmetric_difference_update`. :: Set([1, 2, 5, 6]) >>> -There are also :meth:`issubset` and :meth:`issuperset` methods for checking +There are also :meth:`!issubset` and :meth:`!issuperset` methods for checking whether one set is a subset or superset of another:: >>> S1 = sets.Set([1,2,3]) @@ -166,7 +166,7 @@ statement isn't allowed inside the :keyword:`try` block of a :keyword:`!try`...\ :keyword:`!finally` statement; read :pep:`255` for a full explanation of the interaction between :keyword:`!yield` and exceptions.) -Here's a sample usage of the :func:`generate_ints` generator:: +Here's a sample usage of the :func:`!generate_ints` generator:: >>> gen = generate_ints(3) >>> gen @@ -227,7 +227,7 @@ like:: sentence := "Store it in the neighboring harbor" if (i := find("or", sentence)) > 5 then write(i) -In Icon the :func:`find` function returns the indexes at which the substring +In Icon the :func:`!find` function returns the indexes at which the substring "or" is found: 3, 23, 33. In the :keyword:`if` statement, ``i`` is first assigned a value of 3, but 3 is less than 5, so the comparison fails, and Icon retries it with the second value of 23. 23 is greater than 5, so the comparison @@ -345,7 +345,7 @@ Python now allows using arbitrary Unicode strings (within the limitations of the file system) for all functions that expect file names, most notably the :func:`open` built-in function. If a Unicode string is passed to :func:`os.listdir`, Python now returns a list of Unicode strings. A new -function, :func:`os.getcwdu`, returns the current directory as a Unicode string. +function, :func:`!os.getcwdu`, returns the current directory as a Unicode string. Byte strings still work as file names, and on Windows Python will transparently convert them to Unicode using the ``mbcs`` encoding. @@ -386,10 +386,10 @@ one followed by the platform on which Python is running. Opening a file with the mode ``'U'`` or ``'rU'`` will open a file for reading in :term:`universal newlines` mode. 
All three line ending conventions will be translated to a ``'\n'`` in the strings returned by the various file methods such as -:meth:`read` and :meth:`readline`. +:meth:`!read` and :meth:`!readline`. Universal newline support is also used when importing modules and when executing -a file with the :func:`execfile` function. This means that Python modules can +a file with the :func:`!execfile` function. This means that Python modules can be shared between all three operating systems without needing to convert the line-endings. @@ -450,16 +450,16 @@ records to standard error or to a file or socket, send them to the system log, or even e-mail them to a particular address; of course, it's also possible to write your own handler classes. -The :class:`Logger` class is the primary class. Most application code will deal -with one or more :class:`Logger` objects, each one used by a particular -subsystem of the application. Each :class:`Logger` is identified by a name, and +The :class:`~logging.Logger` class is the primary class. Most application code will deal +with one or more :class:`~logging.Logger` objects, each one used by a particular +subsystem of the application. Each :class:`~logging.Logger` is identified by a name, and names are organized into a hierarchy using ``.`` as the component separator. -For example, you might have :class:`Logger` instances named ``server``, +For example, you might have :class:`~logging.Logger` instances named ``server``, ``server.auth`` and ``server.network``. The latter two instances are below ``server`` in the hierarchy. This means that if you turn up the verbosity for ``server`` or direct ``server`` messages to a different handler, the changes will also apply to records logged to ``server.auth`` and ``server.network``. -There's also a root :class:`Logger` that's the parent of all other loggers. +There's also a root :class:`~logging.Logger` that's the parent of all other loggers. For simple uses, the :mod:`logging` package contains some convenience functions that always use the root log:: @@ -480,14 +480,14 @@ This produces the following output:: In the default configuration, informational and debugging messages are suppressed and the output is sent to standard error. You can enable the display -of informational and debugging messages by calling the :meth:`setLevel` method +of informational and debugging messages by calling the :meth:`~logging.Logger.setLevel` method on the root logger. -Notice the :func:`warning` call's use of string formatting operators; all of the +Notice the :func:`~logging.warning` call's use of string formatting operators; all of the functions for logging messages take the arguments ``(msg, arg1, arg2, ...)`` and log the string resulting from ``msg % (arg1, arg2, ...)``. -There's also an :func:`exception` function that records the most recent +There's also an :func:`~logging.exception` function that records the most recent traceback. Any of the other functions will also record the traceback if you specify a true value for the keyword argument *exc_info*. :: @@ -517,16 +517,16 @@ it if it doesn't exist yet. ``getLogger(None)`` returns the root logger. :: ... Log records are usually propagated up the hierarchy, so a message logged to -``server.auth`` is also seen by ``server`` and ``root``, but a :class:`Logger` -can prevent this by setting its :attr:`propagate` attribute to :const:`False`. 
+``server.auth`` is also seen by ``server`` and ``root``, but a :class:`~logging.Logger` +can prevent this by setting its :attr:`~logging.Logger.propagate` attribute to :const:`False`. There are more classes provided by the :mod:`logging` package that can be -customized. When a :class:`Logger` instance is told to log a message, it -creates a :class:`LogRecord` instance that is sent to any number of different -:class:`Handler` instances. Loggers and handlers can also have an attached list -of filters, and each filter can cause the :class:`LogRecord` to be ignored or +customized. When a :class:`~logging.Logger` instance is told to log a message, it +creates a :class:`~logging.LogRecord` instance that is sent to any number of different +:class:`~logging.Handler` instances. Loggers and handlers can also have an attached list +of filters, and each filter can cause the :class:`~logging.LogRecord` to be ignored or can modify the record before passing it along. When they're finally output, -:class:`LogRecord` instances are converted to text by a :class:`Formatter` +:class:`~logging.LogRecord` instances are converted to text by a :class:`~logging.Formatter` class. All of these classes can be replaced by your own specially written classes. @@ -550,7 +550,7 @@ PEP 285: A Boolean Type ======================= A Boolean type was added to Python 2.3. Two new constants were added to the -:mod:`__builtin__` module, :const:`True` and :const:`False`. (:const:`True` and +:mod:`!__builtin__` module, :const:`True` and :const:`False`. (:const:`True` and :const:`False` constants were added to the built-ins in Python 2.2.1, but the 2.2.1 versions are simply set to integer values of 1 and 0 and aren't a different type.) @@ -662,7 +662,7 @@ a central catalog server. The resulting catalog is available from https://pypi.org. To make the catalog a bit more useful, a new optional *classifiers* keyword -argument has been added to the Distutils :func:`setup` function. A list of +argument has been added to the Distutils :func:`!setup` function. A list of `Trove `_-style strings can be supplied to help classify the software. @@ -703,14 +703,14 @@ PEP 302: New Import Hooks ========================= While it's been possible to write custom import hooks ever since the -:mod:`ihooks` module was introduced in Python 1.3, no one has ever been really +:mod:`!ihooks` module was introduced in Python 1.3, no one has ever been really happy with it because writing new import hooks is difficult and messy. There -have been various proposed alternatives such as the :mod:`imputil` and :mod:`iu` +have been various proposed alternatives such as the :mod:`!imputil` and :mod:`!iu` modules, but none of them has ever gained much acceptance, and none of them were easily usable from C code. :pep:`302` borrows ideas from its predecessors, especially from Gordon -McMillan's :mod:`iu` module. Three new items are added to the :mod:`sys` +McMillan's :mod:`!iu` module. Three new items are added to the :mod:`sys` module: * ``sys.path_hooks`` is a list of callable objects; most often they'll be @@ -790,7 +790,7 @@ package is much simpler:: for line in reader: print line -The :func:`reader` function takes a number of different options. The field +The :func:`~csv.reader` function takes a number of different options. The field separator isn't limited to the comma and can be changed to any character, and so can the quoting and line-ending characters. @@ -814,7 +814,7 @@ of tuples or lists, quoting strings that contain the delimiter. 
PEP 307: Pickle Enhancements ============================ -The :mod:`pickle` and :mod:`cPickle` modules received some attention during the +The :mod:`pickle` and :mod:`!cPickle` modules received some attention during the 2.3 development cycle. In 2.2, new-style classes could be pickled without difficulty, but they weren't pickled very compactly; :pep:`307` quotes a trivial example where a new-style class results in a pickled string three times longer @@ -829,13 +829,13 @@ fanciest protocol available. Unpickling is no longer considered a safe operation. 2.2's :mod:`pickle` provided hooks for trying to prevent unsafe classes from being unpickled -(specifically, a :attr:`__safe_for_unpickling__` attribute), but none of this +(specifically, a :attr:`!__safe_for_unpickling__` attribute), but none of this code was ever audited and therefore it's all been ripped out in 2.3. You should not unpickle untrusted data in any version of Python. To reduce the pickling overhead for new-style classes, a new interface for customizing pickling was added using three special methods: -:meth:`__getstate__`, :meth:`__setstate__`, and :meth:`__getnewargs__`. Consult +:meth:`~object.__getstate__`, :meth:`~object.__setstate__`, and :meth:`~object.__getnewargs__`. Consult :pep:`307` for the full semantics of these methods. As a way to compress pickles yet further, it's now possible to use integer codes @@ -939,7 +939,7 @@ Or use slice objects directly in subscripts:: To simplify implementing sequences that support extended slicing, slice objects now have a method ``indices(length)`` which, given the length of a sequence, returns a ``(start, stop, step)`` tuple that can be passed directly to -:func:`range`. :meth:`indices` handles omitted and out-of-bounds indices in a +:func:`range`. :meth:`!indices` handles omitted and out-of-bounds indices in a manner consistent with regular slices (and this innocuous phrase hides a welter of confusing details!). The method is intended to be used like this:: @@ -1042,7 +1042,7 @@ Here are all of the changes that Python 2.3 makes to the core Python language. execute any assertions. * Most type objects are now callable, so you can use them to create new objects - such as functions, classes, and modules. (This means that the :mod:`new` module + such as functions, classes, and modules. (This means that the :mod:`!new` module can be deprecated in a future Python version, because you can now use the type objects available in the :mod:`types` module.) For example, you can create a new module object with the following code: @@ -1069,11 +1069,11 @@ Here are all of the changes that Python 2.3 makes to the core Python language. * Using ``None`` as a variable name will now result in a :exc:`SyntaxWarning` warning. In a future version of Python, ``None`` may finally become a keyword. -* The :meth:`xreadlines` method of file objects, introduced in Python 2.1, is no +* The :meth:`!xreadlines` method of file objects, introduced in Python 2.1, is no longer necessary because files now behave as their own iterator. - :meth:`xreadlines` was originally introduced as a faster way to loop over all + :meth:`!xreadlines` was originally introduced as a faster way to loop over all the lines in a file, but now you can simply write ``for line in file_obj``. 
- File objects also have a new read-only :attr:`encoding` attribute that gives the + File objects also have a new read-only :attr:`!encoding` attribute that gives the encoding used by the file; Unicode strings written to the file will be automatically converted to bytes using the given encoding. @@ -1096,12 +1096,12 @@ Here are all of the changes that Python 2.3 makes to the core Python language. switching overhead. Some multithreaded applications may suffer slower response time, but that's easily fixed by setting the limit back to a lower number using ``sys.setcheckinterval(N)``. The limit can be retrieved with the new - :func:`sys.getcheckinterval` function. + :func:`!sys.getcheckinterval` function. * One minor but far-reaching change is that the names of extension types defined by the modules included with Python now contain the module and a ``'.'`` in front of the type name. For example, in Python 2.2, if you created a socket and - printed its :attr:`__class__`, you'd get this output:: + printed its :attr:`!__class__`, you'd get this output:: >>> s = socket.socket() >>> s.__class__ @@ -1138,9 +1138,9 @@ String Changes True Note that this doesn't tell you where the substring starts; if you need that - information, use the :meth:`find` string method. + information, use the :meth:`~str.find` string method. -* The :meth:`strip`, :meth:`lstrip`, and :meth:`rstrip` string methods now have +* The :meth:`~str.strip`, :meth:`~str.lstrip`, and :meth:`~str.rstrip` string methods now have an optional argument for specifying the characters to strip. The default is still to remove all whitespace characters:: @@ -1156,13 +1156,13 @@ String Changes (Suggested by Simon Brunning and implemented by Walter Dörwald.) -* The :meth:`startswith` and :meth:`endswith` string methods now accept negative +* The :meth:`~str.startswith` and :meth:`~str.endswith` string methods now accept negative numbers for the *start* and *end* parameters. -* Another new string method is :meth:`zfill`, originally a function in the - :mod:`string` module. :meth:`zfill` pads a numeric string with zeros on the +* Another new string method is :meth:`~str.zfill`, originally a function in the + :mod:`string` module. :meth:`~str.zfill` pads a numeric string with zeros on the left until it's the specified width. Note that the ``%`` operator is still more - flexible and powerful than :meth:`zfill`. :: + flexible and powerful than :meth:`~str.zfill`. :: >>> '45'.zfill(4) '0045' @@ -1173,10 +1173,10 @@ String Changes (Contributed by Walter Dörwald.) -* A new type object, :class:`basestring`, has been added. Both 8-bit strings and +* A new type object, :class:`!basestring`, has been added. Both 8-bit strings and Unicode strings inherit from this type, so ``isinstance(obj, basestring)`` will return :const:`True` for either kind of string. It's a completely abstract - type, so you can't create :class:`basestring` instances. + type, so you can't create :class:`!basestring` instances. * Interned strings are no longer immortal and will now be garbage-collected in the usual way when the only reference to them is from the internal dictionary of @@ -1191,7 +1191,7 @@ Optimizations * The creation of new-style class instances has been made much faster; they're now faster than classic classes! -* The :meth:`sort` method of list objects has been extensively rewritten by Tim +* The :meth:`~list.sort` method of list objects has been extensively rewritten by Tim Peters, and the implementation is significantly faster. 
* Multiplication of large long integers is now much faster thanks to an @@ -1203,7 +1203,7 @@ Optimizations increase, depending on your compiler's idiosyncrasies. See section :ref:`23section-other` for a longer explanation. (Removed by Michael Hudson.) -* :func:`xrange` objects now have their own iterator, making ``for i in +* :func:`!xrange` objects now have their own iterator, making ``for i in xrange(n)`` slightly faster than ``for i in range(n)``. (Patch by Raymond Hettinger.) @@ -1230,21 +1230,21 @@ complete list of changes, or look through the CVS logs for all the details. operator to add another array's contents, and the ``*=`` assignment operator to repeat an array. (Contributed by Jason Orendorff.) -* The :mod:`bsddb` module has been replaced by version 4.1.6 of the `PyBSDDB +* The :mod:`!bsddb` module has been replaced by version 4.1.6 of the `PyBSDDB `_ package, providing a more complete interface to the transactional features of the BerkeleyDB library. - The old version of the module has been renamed to :mod:`bsddb185` and is no + The old version of the module has been renamed to :mod:`!bsddb185` and is no longer built automatically; you'll have to edit :file:`Modules/Setup` to enable - it. Note that the new :mod:`bsddb` package is intended to be compatible with + it. Note that the new :mod:`!bsddb` package is intended to be compatible with the old module, so be sure to file bugs if you discover any incompatibilities. When upgrading to Python 2.3, if the new interpreter is compiled with a new version of the underlying BerkeleyDB library, you will almost certainly have to convert your database files to the new version. You can do this fairly easily with the new scripts :file:`db2pickle.py` and :file:`pickle2db.py` which you will find in the distribution's :file:`Tools/scripts` directory. If you've - already been using the PyBSDDB package and importing it as :mod:`bsddb3`, you - will have to change your ``import`` statements to import it as :mod:`bsddb`. + already been using the PyBSDDB package and importing it as :mod:`!bsddb3`, you + will have to change your ``import`` statements to import it as :mod:`!bsddb`. * The new :mod:`bz2` module is an interface to the bz2 data compression library. bz2-compressed data is usually smaller than corresponding @@ -1253,11 +1253,11 @@ complete list of changes, or look through the CVS logs for all the details. * A set of standard date/time types has been added in the new :mod:`datetime` module. See the following section for more details. -* The Distutils :class:`Extension` class now supports an extra constructor +* The Distutils :class:`!Extension` class now supports an extra constructor argument named *depends* for listing additional source files that an extension depends on. This lets Distutils recompile the module if any of the dependency files are modified. For example, if :file:`sampmodule.c` includes the header - file :file:`sample.h`, you would create the :class:`Extension` object like + file :file:`sample.h`, you would create the :class:`!Extension` object like this:: ext = Extension("samp", @@ -1268,21 +1268,21 @@ complete list of changes, or look through the CVS logs for all the details. (Contributed by Jeremy Hylton.) 
* Other minor changes to Distutils: it now checks for the :envvar:`CC`, - :envvar:`CFLAGS`, :envvar:`CPP`, :envvar:`LDFLAGS`, and :envvar:`CPPFLAGS` + :envvar:`CFLAGS`, :envvar:`!CPP`, :envvar:`LDFLAGS`, and :envvar:`CPPFLAGS` environment variables, using them to override the settings in Python's configuration (contributed by Robert Weber). * Previously the :mod:`doctest` module would only search the docstrings of public methods and functions for test cases, but it now also examines private - ones as well. The :func:`DocTestSuite` function creates a + ones as well. The :func:`~doctest.DocTestSuite` function creates a :class:`unittest.TestSuite` object from a set of :mod:`doctest` tests. * The new ``gc.get_referents(object)`` function returns a list of all the objects referenced by *object*. -* The :mod:`getopt` module gained a new function, :func:`gnu_getopt`, that - supports the same arguments as the existing :func:`getopt` function but uses - GNU-style scanning mode. The existing :func:`getopt` stops processing options as +* The :mod:`getopt` module gained a new function, :func:`~getopt.gnu_getopt`, that + supports the same arguments as the existing :func:`~getopt.getopt` function but uses + GNU-style scanning mode. The existing :func:`~getopt.getopt` stops processing options as soon as a non-option argument is encountered, but in GNU-style mode processing continues, meaning that options and arguments can be mixed. For example:: @@ -1311,7 +1311,7 @@ complete list of changes, or look through the CVS logs for all the details. O(lg n). (See https://xlinux.nist.gov/dads//HTML/priorityque.html for more information about the priority queue data structure.) - The :mod:`heapq` module provides :func:`heappush` and :func:`heappop` functions + The :mod:`heapq` module provides :func:`~heapq.heappush` and :func:`~heapq.heappop` functions for adding and removing items while maintaining the heap property on top of some other mutable Python sequence type. Here's an example that uses a Python list:: @@ -1343,7 +1343,7 @@ complete list of changes, or look through the CVS logs for all the details. * The :mod:`itertools` contains a number of useful functions for use with iterators, inspired by various functions provided by the ML and Haskell languages. For example, ``itertools.ifilter(predicate, iterator)`` returns all - elements in the iterator for which the function :func:`predicate` returns + elements in the iterator for which the function :func:`!predicate` returns :const:`True`, and ``itertools.repeat(obj, N)`` returns ``obj`` *N* times. There are a number of other functions in the module; see the package's reference documentation for details. @@ -1356,9 +1356,9 @@ complete list of changes, or look through the CVS logs for all the details. was added to :func:`math.log` to make it easier to compute logarithms for bases other than ``e`` and ``10``. (Contributed by Raymond Hettinger.) -* Several new POSIX functions (:func:`getpgid`, :func:`killpg`, :func:`lchown`, - :func:`loadavg`, :func:`major`, :func:`makedev`, :func:`minor`, and - :func:`mknod`) were added to the :mod:`posix` module that underlies the +* Several new POSIX functions (:func:`!getpgid`, :func:`!killpg`, :func:`!lchown`, + :func:`!loadavg`, :func:`!major`, :func:`!makedev`, :func:`!minor`, and + :func:`!mknod`) were added to the :mod:`posix` module that underlies the :mod:`os` module. (Contributed by Gustavo Niemeyer, Geert Jansen, and Denis S. Otkidach.) 
@@ -1368,9 +1368,9 @@ complete list of changes, or look through the CVS logs for all the details. During testing, it was found that some applications will break if time stamps are floats. For compatibility, when using the tuple interface of the - :class:`stat_result` time stamps will be represented as integers. When using + :class:`~os.stat_result` time stamps will be represented as integers. When using named fields (a feature first introduced in Python 2.2), time stamps are still - represented as integers, unless :func:`os.stat_float_times` is invoked to enable + represented as integers, unless :func:`!os.stat_float_times` is invoked to enable float return values:: >>> os.stat("/tmp").st_mtime @@ -1391,7 +1391,7 @@ complete list of changes, or look through the CVS logs for all the details. automatically generate a usage message. See the following section for more details. -* The old and never-documented :mod:`linuxaudiodev` module has been deprecated, +* The old and never-documented :mod:`!linuxaudiodev` module has been deprecated, and a new version named :mod:`!ossaudiodev` has been added. The module was renamed because the OSS sound drivers can be used on platforms other than Linux, and the interface has also been tidied and brought up to date in various ways. @@ -1402,14 +1402,14 @@ complete list of changes, or look through the CVS logs for all the details. functions for getting the architecture, CPU type, the Windows OS version, and even the Linux distribution version. (Contributed by Marc-André Lemburg.) -* The parser objects provided by the :mod:`pyexpat` module can now optionally +* The parser objects provided by the :mod:`pyexpat ` module can now optionally buffer character data, resulting in fewer calls to your character data handler and therefore faster performance. Setting the parser object's - :attr:`buffer_text` attribute to :const:`True` will enable buffering. + :attr:`~xml.parsers.expat.xmlparser.buffer_text` attribute to :const:`True` will enable buffering. * The ``sample(population, k)`` function was added to the :mod:`random` - module. *population* is a sequence or :class:`xrange` object containing the - elements of a population, and :func:`sample` chooses *k* elements from the + module. *population* is a sequence or :class:`!xrange` object containing the + elements of a population, and :func:`~random.sample` chooses *k* elements from the population without replacing chosen elements. *k* can be any value up to ``len(population)``. For example:: @@ -1436,20 +1436,20 @@ complete list of changes, or look through the CVS logs for all the details. (All changes contributed by Raymond Hettinger.) * The :mod:`readline` module also gained a number of new functions: - :func:`get_history_item`, :func:`get_current_history_length`, and - :func:`redisplay`. + :func:`~readline.get_history_item`, :func:`~readline.get_current_history_length`, and + :func:`~readline.redisplay`. -* The :mod:`rexec` and :mod:`Bastion` modules have been declared dead, and +* The :mod:`!rexec` and :mod:`!Bastion` modules have been declared dead, and attempts to import them will fail with a :exc:`RuntimeError`. New-style classes provide new ways to break out of the restricted execution environment provided - by :mod:`rexec`, and no one has interest in fixing them or time to do so. If - you have applications using :mod:`rexec`, rewrite them to use something else. + by :mod:`!rexec`, and no one has interest in fixing them or time to do so. 
If + you have applications using :mod:`!rexec`, rewrite them to use something else. (Sticking with Python 2.2 or 2.1 will not make your applications any safer - because there are known bugs in the :mod:`rexec` module in those versions. To - repeat: if you're using :mod:`rexec`, stop using it immediately.) + because there are known bugs in the :mod:`!rexec` module in those versions. To + repeat: if you're using :mod:`!rexec`, stop using it immediately.) -* The :mod:`rotor` module has been deprecated because the algorithm it uses for +* The :mod:`!rotor` module has been deprecated because the algorithm it uses for encryption is not believed to be secure. If you need encryption, use one of the several AES Python modules that are available separately. @@ -1474,9 +1474,9 @@ complete list of changes, or look through the CVS logs for all the details. * On Windows, the :mod:`socket` module now ships with Secure Sockets Layer (SSL) support. -* The value of the C :c:macro:`PYTHON_API_VERSION` macro is now exposed at the +* The value of the C :c:macro:`!PYTHON_API_VERSION` macro is now exposed at the Python level as ``sys.api_version``. The current exception can be cleared by - calling the new :func:`sys.exc_clear` function. + calling the new :func:`!sys.exc_clear` function. * The new :mod:`tarfile` module allows reading from and writing to :program:`tar`\ -format archive files. (Contributed by Lars Gustäbel.) @@ -1486,7 +1486,7 @@ complete list of changes, or look through the CVS logs for all the details. string and returns a list containing the text split into lines of no more than the chosen width. The ``fill(text, width)`` function returns a single string, reformatted to fit into lines no longer than the chosen width. (As you - can guess, :func:`fill` is built on top of :func:`wrap`. For example:: + can guess, :func:`~textwrap.fill` is built on top of :func:`~textwrap.wrap`. For example:: >>> import textwrap >>> paragraph = "Not a whit, we defy augury: ... more text ..." @@ -1503,15 +1503,15 @@ complete list of changes, or look through the CVS logs for all the details. it will come: the readiness is all. >>> - The module also contains a :class:`TextWrapper` class that actually implements - the text wrapping strategy. Both the :class:`TextWrapper` class and the - :func:`wrap` and :func:`fill` functions support a number of additional keyword + The module also contains a :class:`~textwrap.TextWrapper` class that actually implements + the text wrapping strategy. Both the :class:`~textwrap.TextWrapper` class and the + :func:`~textwrap.wrap` and :func:`~textwrap.fill` functions support a number of additional keyword arguments for fine-tuning the formatting; consult the module's documentation for details. (Contributed by Greg Ward.) -* The :mod:`thread` and :mod:`threading` modules now have companion modules, - :mod:`dummy_thread` and :mod:`dummy_threading`, that provide a do-nothing - implementation of the :mod:`thread` module's interface for platforms where +* The :mod:`!thread` and :mod:`threading` modules now have companion modules, + :mod:`!dummy_thread` and :mod:`!dummy_threading`, that provide a do-nothing + implementation of the :mod:`!thread` module's interface for platforms where threads are not supported. The intention is to simplify thread-aware modules (ones that *don't* rely on threads to run) by putting the following code at the top:: @@ -1521,26 +1521,26 @@ complete list of changes, or look through the CVS logs for all the details. 
except ImportError: import dummy_threading as _threading - In this example, :mod:`_threading` is used as the module name to make it clear + In this example, :mod:`!_threading` is used as the module name to make it clear that the module being used is not necessarily the actual :mod:`threading` - module. Code can call functions and use classes in :mod:`_threading` whether or + module. Code can call functions and use classes in :mod:`!_threading` whether or not threads are supported, avoiding an :keyword:`if` statement and making the code slightly clearer. This module will not magically make multithreaded code run without threads; code that waits for another thread to return or to do something will simply hang forever. -* The :mod:`time` module's :func:`strptime` function has long been an annoyance - because it uses the platform C library's :func:`strptime` implementation, and +* The :mod:`time` module's :func:`~time.strptime` function has long been an annoyance + because it uses the platform C library's :func:`~time.strptime` implementation, and different platforms sometimes have odd bugs. Brett Cannon contributed a portable implementation that's written in pure Python and should behave identically on all platforms. * The new :mod:`timeit` module helps measure how long snippets of Python code take to execute. The :file:`timeit.py` file can be run directly from the - command line, or the module's :class:`Timer` class can be imported and used + command line, or the module's :class:`~timeit.Timer` class can be imported and used directly. Here's a short example that figures out whether it's faster to convert an 8-bit string to Unicode by appending an empty Unicode string to it or - by using the :func:`unicode` function:: + by using the :func:`!unicode` function:: import timeit @@ -1558,46 +1558,46 @@ complete list of changes, or look through the CVS logs for all the details. * The :mod:`!Tix` module has received various bug fixes and updates for the current version of the Tix package. -* The :mod:`Tkinter` module now works with a thread-enabled version of Tcl. +* The :mod:`!Tkinter` module now works with a thread-enabled version of Tcl. Tcl's threading model requires that widgets only be accessed from the thread in which they're created; accesses from another thread can cause Tcl to panic. For - certain Tcl interfaces, :mod:`Tkinter` will now automatically avoid this when a + certain Tcl interfaces, :mod:`!Tkinter` will now automatically avoid this when a widget is accessed from a different thread by marshalling a command, passing it to the correct thread, and waiting for the results. Other interfaces can't be - handled automatically but :mod:`Tkinter` will now raise an exception on such an + handled automatically but :mod:`!Tkinter` will now raise an exception on such an access so that you can at least find out about the problem. See https://mail.python.org/pipermail/python-dev/2002-December/031107.html for a more detailed explanation of this change. (Implemented by Martin von Löwis.) -* Calling Tcl methods through :mod:`_tkinter` no longer returns only strings. +* Calling Tcl methods through :mod:`!_tkinter` no longer returns only strings. Instead, if Tcl returns other objects those objects are converted to their - Python equivalent, if one exists, or wrapped with a :class:`_tkinter.Tcl_Obj` + Python equivalent, if one exists, or wrapped with a :class:`!_tkinter.Tcl_Obj` object if no Python equivalent exists. 
This behavior can be controlled through - the :meth:`wantobjects` method of :class:`tkapp` objects. + the :meth:`!wantobjects` method of :class:`!tkapp` objects. - When using :mod:`_tkinter` through the :mod:`Tkinter` module (as most Tkinter + When using :mod:`!_tkinter` through the :mod:`!Tkinter` module (as most Tkinter applications will), this feature is always activated. It should not cause compatibility problems, since Tkinter would always convert string results to Python types where possible. If any incompatibilities are found, the old behavior can be restored by setting - the :attr:`wantobjects` variable in the :mod:`Tkinter` module to false before - creating the first :class:`tkapp` object. :: + the :attr:`!wantobjects` variable in the :mod:`!Tkinter` module to false before + creating the first :class:`!tkapp` object. :: import Tkinter Tkinter.wantobjects = 0 Any breakage caused by this change should be reported as a bug. -* The :mod:`UserDict` module has a new :class:`DictMixin` class which defines +* The :mod:`!UserDict` module has a new :class:`!DictMixin` class which defines all dictionary methods for classes that already have a minimum mapping interface. This greatly simplifies writing classes that need to be substitutable for dictionaries, such as the classes in the :mod:`shelve` module. Adding the mix-in as a superclass provides the full dictionary interface - whenever the class defines :meth:`~object.__getitem__`, :meth:`__setitem__`, - :meth:`__delitem__`, and :meth:`keys`. For example:: + whenever the class defines :meth:`~object.__getitem__`, :meth:`~object.__setitem__`, + :meth:`~object.__delitem__`, and :meth:`!keys`. For example:: >>> import UserDict >>> class SeqDict(UserDict.DictMixin): @@ -1640,15 +1640,15 @@ complete list of changes, or look through the CVS logs for all the details. * The DOM implementation in :mod:`xml.dom.minidom` can now generate XML output in a particular encoding by providing an optional encoding argument to the - :meth:`toxml` and :meth:`toprettyxml` methods of DOM nodes. + :meth:`~xml.dom.minidom.Node.toxml` and :meth:`~xml.dom.minidom.Node.toprettyxml` methods of DOM nodes. -* The :mod:`xmlrpclib` module now supports an XML-RPC extension for handling nil +* The :mod:`!xmlrpclib` module now supports an XML-RPC extension for handling nil data values such as Python's ``None``. Nil values are always supported on unmarshalling an XML-RPC response. To generate requests containing ``None``, you must supply a true value for the *allow_none* parameter when creating a - :class:`Marshaller` instance. + :class:`!Marshaller` instance. -* The new :mod:`DocXMLRPCServer` module allows writing self-documenting XML-RPC +* The new :mod:`!DocXMLRPCServer` module allows writing self-documenting XML-RPC servers. Run it in demo mode (as a program) to see it in action. Pointing the web browser to the RPC server produces pydoc-style documentation; pointing xmlrpclib to the server allows invoking the actual methods. (Contributed by @@ -1663,8 +1663,8 @@ complete list of changes, or look through the CVS logs for all the details. The :mod:`socket` module has also been extended to transparently convert Unicode hostnames to the ACE version before passing them to the C library. 
- Modules that deal with hostnames such as :mod:`httplib` and :mod:`ftplib`) - also support Unicode host names; :mod:`httplib` also sends HTTP ``Host`` + Modules that deal with hostnames such as :mod:`!httplib` and :mod:`ftplib`) + also support Unicode host names; :mod:`!httplib` also sends HTTP ``Host`` headers using the ACE version of the domain name. :mod:`urllib` supports Unicode URLs with non-ASCII host names as long as the ``path`` part of the URL is ASCII only. @@ -1682,17 +1682,17 @@ Date and time types suitable for expressing timestamps were added as the :mod:`datetime` module. The types don't support different calendars or many fancy features, and just stick to the basics of representing time. -The three primary types are: :class:`date`, representing a day, month, and year; +The three primary types are: :class:`~datetime.date`, representing a day, month, and year; :class:`~datetime.time`, consisting of hour, minute, and second; and :class:`~datetime.datetime`, -which contains all the attributes of both :class:`date` and :class:`~datetime.time`. -There's also a :class:`timedelta` class representing differences between two +which contains all the attributes of both :class:`~datetime.date` and :class:`~datetime.time`. +There's also a :class:`~datetime.timedelta` class representing differences between two points in time, and time zone logic is implemented by classes inheriting from -the abstract :class:`tzinfo` class. +the abstract :class:`~datetime.tzinfo` class. -You can create instances of :class:`date` and :class:`~datetime.time` by either supplying +You can create instances of :class:`~datetime.date` and :class:`~datetime.time` by either supplying keyword arguments to the appropriate constructor, e.g. ``datetime.date(year=1972, month=10, day=15)``, or by using one of a number of -class methods. For example, the :meth:`date.today` class method returns the +class methods. For example, the :meth:`~datetime.date.today` class method returns the current local date. Once created, instances of the date/time classes are all immutable. There are a @@ -1707,8 +1707,8 @@ number of methods for producing formatted strings from objects:: >>> now.strftime('%Y %d %b') '2002 30 Dec' -The :meth:`replace` method allows modifying one or more fields of a -:class:`date` or :class:`~datetime.datetime` instance, returning a new instance:: +The :meth:`~datetime.datetime.replace` method allows modifying one or more fields of a +:class:`~datetime.date` or :class:`~datetime.datetime` instance, returning a new instance:: >>> d = datetime.datetime.now() >>> d @@ -1718,10 +1718,10 @@ The :meth:`replace` method allows modifying one or more fields of a >>> Instances can be compared, hashed, and converted to strings (the result is the -same as that of :meth:`isoformat`). :class:`date` and :class:`~datetime.datetime` -instances can be subtracted from each other, and added to :class:`timedelta` +same as that of :meth:`~datetime.datetime.isoformat`). :class:`~datetime.date` and :class:`~datetime.datetime` +instances can be subtracted from each other, and added to :class:`~datetime.timedelta` instances. The largest missing feature is that there's no standard library -support for parsing strings and getting back a :class:`date` or +support for parsing strings and getting back a :class:`~datetime.date` or :class:`~datetime.datetime`. For more information, refer to the module's reference documentation. 
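As a brief sketch of that date arithmetic (the dates are picked arbitrarily)::

    >>> import datetime
    >>> d1 = datetime.date(2002, 12, 31)
    >>> d2 = datetime.date(2002, 1, 1)
    >>> (d1 - d2).days
    364
    >>> d2 + datetime.timedelta(days=364)
    datetime.date(2002, 12, 31)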
@@ -1739,7 +1739,7 @@ command-line parsing that follows the Unix conventions, automatically creates the output for :option:`!--help`, and can perform different actions for different options. -You start by creating an instance of :class:`OptionParser` and telling it what +You start by creating an instance of :class:`~optparse.OptionParser` and telling it what your program's options are. :: import sys @@ -1753,7 +1753,7 @@ your program's options are. :: action='store', type='int', dest='length', help='set maximum length of output') -Parsing a command line is then done by calling the :meth:`parse_args` method. :: +Parsing a command line is then done by calling the :meth:`~optparse.OptionParser.parse_args` method. :: options, args = op.parse_args(sys.argv[1:]) print options @@ -1925,7 +1925,7 @@ Changes to Python's build process and to the C API include: dependence on a system version or local installation of Expat. * If you dynamically allocate type objects in your extension, you should be - aware of a change in the rules relating to the :attr:`__module__` and + aware of a change in the rules relating to the :attr:`!__module__` and :attr:`~definition.__name__` attributes. In summary, you will want to ensure the type's dictionary contains a ``'__module__'`` key; making the module name the part of the type name leading up to the final period will no longer have the desired @@ -1940,7 +1940,7 @@ Port-Specific Changes Support for a port to IBM's OS/2 using the EMX runtime environment was merged into the main Python source tree. EMX is a POSIX emulation layer over the OS/2 system APIs. The Python port for EMX tries to support all the POSIX-like -capability exposed by the EMX runtime, and mostly succeeds; :func:`fork` and +capability exposed by the EMX runtime, and mostly succeeds; :func:`!fork` and :func:`fcntl` are restricted by the limitations of the underlying emulation layer. The standard OS/2 port, which uses IBM's Visual Age compiler, also gained support for case-sensitive import semantics as part of the integration of @@ -2031,9 +2031,9 @@ code: the file's encoding (UTF-8, Latin-1, or whatever) by adding a comment to the top of the file. See section :ref:`section-encodings` for more information. -* Calling Tcl methods through :mod:`_tkinter` no longer returns only strings. +* Calling Tcl methods through :mod:`!_tkinter` no longer returns only strings. Instead, if Tcl returns other objects those objects are converted to their - Python equivalent, if one exists, or wrapped with a :class:`_tkinter.Tcl_Obj` + Python equivalent, if one exists, or wrapped with a :class:`!_tkinter.Tcl_Obj` object if no Python equivalent exists. * Large octal and hex literals such as ``0xffffffff`` now trigger a @@ -2049,10 +2049,10 @@ code: * You can no longer disable assertions by assigning to ``__debug__``. -* The Distutils :func:`setup` function has gained various new keyword arguments +* The Distutils :func:`!setup` function has gained various new keyword arguments such as *depends*. Old versions of the Distutils will abort if passed unknown keywords. 
A solution is to check for the presence of the new
-  :func:`get_distutil_options` function in your :file:`setup.py` and only uses the
+  :func:`!get_distutil_options` function in your :file:`setup.py` and only uses the
  new keywords with a version of the Distutils that supports them::

    from distutils import core

diff --git a/Doc/whatsnew/3.13.rst b/Doc/whatsnew/3.13.rst
index e22257853d8333..2c869cbe11396b 100644
--- a/Doc/whatsnew/3.13.rst
+++ b/Doc/whatsnew/3.13.rst
@@ -199,6 +199,27 @@ doctest
   :attr:`doctest.TestResults.skipped` attributes.
   (Contributed by Victor Stinner in :gh:`108794`.)

+email
+-----
+
+* :func:`email.utils.getaddresses` and :func:`email.utils.parseaddr` now return
+  ``('', '')`` 2-tuples in more situations where invalid email addresses are
+  encountered, instead of potentially inaccurate values. An optional *strict*
+  parameter has been added to these two functions: use ``strict=False`` to get
+  the old behavior, which accepts malformed inputs.
+  ``getattr(email.utils, 'supports_strict_parsing', False)`` can be used to
+  check whether the *strict* parameter is available.
+  (Contributed by Thomas Dwyer and Victor Stinner for :gh:`102988` to improve
+  the CVE-2023-27043 fix.)
+
+fractions
+---------
+
+* Formatting for objects of type :class:`fractions.Fraction` now supports
+  the standard format specification mini-language rules for fill, alignment,
+  sign handling, minimum width and grouping. (Contributed by Mark Dickinson
+  in :gh:`111320`.)
+
 glob
 ----

@@ -268,6 +289,15 @@ os
   ``False`` on Windows.
   (Contributed by Serhiy Storchaka in :gh:`59616`)

+* :func:`os.posix_spawn` now accepts ``env=None``, which makes the newly spawned
+  process use the current process environment.
+  (Contributed by Jakub Kulik in :gh:`113119`.)
+
+* :func:`os.posix_spawn` gains an :attr:`os.POSIX_SPAWN_CLOSEFROM` attribute for
+  use in ``file_actions=`` on platforms that support
+  :c:func:`!posix_spawn_file_actions_addclosefrom_np`.
+  (Contributed by Jakub Kulik in :gh:`113117`.)
+
 pathlib
 -------

@@ -317,6 +347,21 @@ sqlite3
   object is not :meth:`closed <sqlite3.Connection.close>` explicitly.
   (Contributed by Erlend E. Aasland in :gh:`105539`.)

+subprocess
+----------
+
+* The :mod:`subprocess` module now uses the :func:`os.posix_spawn` function in
+  more situations, notably in the default case of ``close_fds=True`` on more
+  recent versions of platforms including Linux, FreeBSD, and Solaris, where the
+  C library provides :c:func:`!posix_spawn_file_actions_addclosefrom_np`.
+  On Linux this should perform similarly to the existing :c:func:`!vfork`-based
+  code. A private control knob :attr:`!subprocess._USE_POSIX_SPAWN` can be set
+  to ``False`` if you need to force :mod:`subprocess` to never use
+  :func:`os.posix_spawn`. Please report your reason and platform details in
+  the CPython issue tracker if you set this, so that we can improve our API
+  selection logic for everyone.
+  (Contributed by Jakub Kulik in :gh:`113117`.)
+
 sys
 ---

@@ -390,6 +435,12 @@ Optimizations
 * :func:`textwrap.indent` is now ~30% faster than before for large input.
   (Contributed by Inada Naoki in :gh:`107369`.)

+* The :mod:`subprocess` module uses :func:`os.posix_spawn` in more situations,
+  including the default case of ``close_fds=True`` on many modern platforms.
+  This should provide a noteworthy performance increase when launching
+  processes on FreeBSD and Solaris. See the ``subprocess`` section above for
+  details. (Contributed by Jakub Kulik in :gh:`113117`.)
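To make the *strict* parsing change described in the ``email`` section above more concrete, here is a minimal, hedged sketch; the malformed header value is invented purely for illustration::

    import email.utils

    header_values = ['alice@example.org, "Broken <bob@example.org']

    if getattr(email.utils, 'supports_strict_parsing', False):
        # Strict parsing (the new default) may report malformed addresses
        # as ('', '') pairs instead of guessing at their contents.
        print(email.utils.getaddresses(header_values))
        # Pass strict=False to opt back in to the historical, lenient parsing.
        print(email.utils.getaddresses(header_values, strict=False))
    else:
        # Older releases: only the historical behavior is available.
        print(email.utils.getaddresses(header_values))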
+ Deprecated ========== diff --git a/Include/internal/pycore_lock.h b/Include/internal/pycore_lock.h index 03ad1c9fd584b5..18a8896d97a548 100644 --- a/Include/internal/pycore_lock.h +++ b/Include/internal/pycore_lock.h @@ -213,6 +213,45 @@ _PyOnceFlag_CallOnce(_PyOnceFlag *flag, _Py_once_fn_t *fn, void *arg) return _PyOnceFlag_CallOnceSlow(flag, fn, arg); } +// A readers-writer (RW) lock. The lock supports multiple concurrent readers or +// a single writer. The lock is write-preferring: if a writer is waiting while +// the lock is read-locked then, new readers will be blocked. This avoids +// starvation of writers. +// +// In C++, the equivalent synchronization primitive is std::shared_mutex +// with shared ("read") and exclusive ("write") locking. +// +// The two least significant bits are used to indicate if the lock is +// write-locked and if there are parked threads (either readers or writers) +// waiting to acquire the lock. The remaining bits are used to indicate the +// number of readers holding the lock. +// +// 0b000..00000: unlocked +// 0bnnn..nnn00: nnn..nnn readers holding the lock +// 0bnnn..nnn10: nnn..nnn readers holding the lock and a writer is waiting +// 0b00000..010: unlocked with awoken writer about to acquire lock +// 0b00000..001: write-locked +// 0b00000..011: write-locked and readers or other writers are waiting +// +// Note that reader_count must be zero if the lock is held by a writer, and +// vice versa. The lock can only be held by readers or a writer, but not both. +// +// The design is optimized for simplicity of the implementation. The lock is +// not fair: if fairness is desired, use an additional PyMutex to serialize +// writers. The lock is also not reentrant. +typedef struct { + uintptr_t bits; +} _PyRWMutex; + +// Read lock (i.e., shared lock) +PyAPI_FUNC(void) _PyRWMutex_RLock(_PyRWMutex *rwmutex); +PyAPI_FUNC(void) _PyRWMutex_RUnlock(_PyRWMutex *rwmutex); + +// Write lock (i.e., exclusive lock) +PyAPI_FUNC(void) _PyRWMutex_Lock(_PyRWMutex *rwmutex); +PyAPI_FUNC(void) _PyRWMutex_Unlock(_PyRWMutex *rwmutex); + + #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_opcode_metadata.h b/Include/internal/pycore_opcode_metadata.h index 4670c34a833963..36b6cd52d2b272 100644 --- a/Include/internal/pycore_opcode_metadata.h +++ b/Include/internal/pycore_opcode_metadata.h @@ -31,636 +31,636 @@ extern int _PyOpcode_num_popped(int opcode, int oparg, bool jump); #ifdef NEED_OPCODE_METADATA int _PyOpcode_num_popped(int opcode, int oparg, bool jump) { switch(opcode) { - case NOP: - return 0; - case RESUME: - return 0; - case RESUME_CHECK: - return 0; - case INSTRUMENTED_RESUME: - return 0; - case LOAD_CLOSURE: - return 0; - case LOAD_FAST_CHECK: - return 0; - case LOAD_FAST: - return 0; - case LOAD_FAST_AND_CLEAR: - return 0; - case LOAD_FAST_LOAD_FAST: - return 0; - case LOAD_CONST: - return 0; - case STORE_FAST: - return 1; - case STORE_FAST_MAYBE_NULL: - return 1; - case STORE_FAST_LOAD_FAST: + case BEFORE_ASYNC_WITH: return 1; - case STORE_FAST_STORE_FAST: - return 2; - case POP_TOP: + case BEFORE_WITH: return 1; - case PUSH_NULL: - return 0; - case END_FOR: - return 2; - case INSTRUMENTED_END_FOR: - return 2; - case END_SEND: + case BINARY_OP: return 2; - case INSTRUMENTED_END_SEND: + case BINARY_OP_ADD_FLOAT: return 2; - case UNARY_NEGATIVE: - return 1; - case UNARY_NOT: - return 1; - case _SPECIALIZE_TO_BOOL: - return 1; - case _TO_BOOL: - return 1; - case TO_BOOL: - return 1; - case TO_BOOL_BOOL: - return 1; - case TO_BOOL_INT: - return 1; - case 
TO_BOOL_LIST: - return 1; - case TO_BOOL_NONE: - return 1; - case TO_BOOL_STR: - return 1; - case TO_BOOL_ALWAYS_TRUE: - return 1; - case UNARY_INVERT: - return 1; - case _GUARD_BOTH_INT: + case BINARY_OP_ADD_INT: return 2; - case _BINARY_OP_MULTIPLY_INT: + case BINARY_OP_ADD_UNICODE: return 2; - case _BINARY_OP_ADD_INT: + case BINARY_OP_INPLACE_ADD_UNICODE: return 2; - case _BINARY_OP_SUBTRACT_INT: + case BINARY_OP_MULTIPLY_FLOAT: return 2; case BINARY_OP_MULTIPLY_INT: return 2; - case BINARY_OP_ADD_INT: + case BINARY_OP_SUBTRACT_FLOAT: return 2; case BINARY_OP_SUBTRACT_INT: return 2; - case _GUARD_BOTH_FLOAT: - return 2; - case _BINARY_OP_MULTIPLY_FLOAT: - return 2; - case _BINARY_OP_ADD_FLOAT: - return 2; - case _BINARY_OP_SUBTRACT_FLOAT: - return 2; - case BINARY_OP_MULTIPLY_FLOAT: + case BINARY_SLICE: + return 3; + case BINARY_SUBSCR: return 2; - case BINARY_OP_ADD_FLOAT: + case BINARY_SUBSCR_DICT: return 2; - case BINARY_OP_SUBTRACT_FLOAT: + case BINARY_SUBSCR_GETITEM: return 2; - case _GUARD_BOTH_UNICODE: + case BINARY_SUBSCR_LIST_INT: return 2; - case _BINARY_OP_ADD_UNICODE: + case BINARY_SUBSCR_STR_INT: return 2; - case BINARY_OP_ADD_UNICODE: + case BINARY_SUBSCR_TUPLE_INT: return 2; - case _BINARY_OP_INPLACE_ADD_UNICODE: + case BUILD_CONST_KEY_MAP: + return oparg + 1; + case BUILD_LIST: + return oparg; + case BUILD_MAP: + return oparg*2; + case BUILD_SET: + return oparg; + case BUILD_SLICE: + return ((oparg == 3) ? 1 : 0) + 2; + case BUILD_STRING: + return oparg; + case BUILD_TUPLE: + return oparg; + case CACHE: + return 0; + case CALL: + return oparg + 2; + case CALL_ALLOC_AND_ENTER_INIT: + return oparg + 2; + case CALL_BOUND_METHOD_EXACT_ARGS: + return oparg + 2; + case CALL_BUILTIN_CLASS: + return oparg + 2; + case CALL_BUILTIN_FAST: + return oparg + 2; + case CALL_BUILTIN_FAST_WITH_KEYWORDS: + return oparg + 2; + case CALL_BUILTIN_O: + return oparg + 2; + case CALL_FUNCTION_EX: + return ((oparg & 1) ? 
1 : 0) + 3; + case CALL_INTRINSIC_1: + return 1; + case CALL_INTRINSIC_2: return 2; - case BINARY_OP_INPLACE_ADD_UNICODE: + case CALL_ISINSTANCE: + return oparg + 2; + case CALL_KW: + return oparg + 3; + case CALL_LEN: + return oparg + 2; + case CALL_LIST_APPEND: + return oparg + 2; + case CALL_METHOD_DESCRIPTOR_FAST: + return oparg + 2; + case CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS: + return oparg + 2; + case CALL_METHOD_DESCRIPTOR_NOARGS: + return oparg + 2; + case CALL_METHOD_DESCRIPTOR_O: + return oparg + 2; + case CALL_PY_EXACT_ARGS: + return oparg + 2; + case CALL_PY_WITH_DEFAULTS: + return oparg + 2; + case CALL_STR_1: + return oparg + 2; + case CALL_TUPLE_1: + return oparg + 2; + case CALL_TYPE_1: + return oparg + 2; + case CHECK_EG_MATCH: return 2; - case _SPECIALIZE_BINARY_SUBSCR: + case CHECK_EXC_MATCH: return 2; - case _BINARY_SUBSCR: + case CLEANUP_THROW: + return 3; + case COMPARE_OP: return 2; - case BINARY_SUBSCR: + case COMPARE_OP_FLOAT: return 2; - case BINARY_SLICE: - return 3; - case STORE_SLICE: - return 4; - case BINARY_SUBSCR_LIST_INT: + case COMPARE_OP_INT: return 2; - case BINARY_SUBSCR_STR_INT: + case COMPARE_OP_STR: return 2; - case BINARY_SUBSCR_TUPLE_INT: + case CONTAINS_OP: return 2; - case BINARY_SUBSCR_DICT: + case CONVERT_VALUE: + return 1; + case COPY: + return (oparg-1) + 1; + case COPY_FREE_VARS: + return 0; + case DELETE_ATTR: + return 1; + case DELETE_DEREF: + return 0; + case DELETE_FAST: + return 0; + case DELETE_GLOBAL: + return 0; + case DELETE_NAME: + return 0; + case DELETE_SUBSCR: return 2; - case BINARY_SUBSCR_GETITEM: + case DICT_MERGE: + return (oparg - 1) + 5; + case DICT_UPDATE: + return (oparg - 1) + 2; + case END_ASYNC_FOR: return 2; - case LIST_APPEND: - return (oparg-1) + 2; - case SET_ADD: - return (oparg-1) + 2; - case _SPECIALIZE_STORE_SUBSCR: + case END_FOR: return 2; - case _STORE_SUBSCR: - return 3; - case STORE_SUBSCR: - return 3; - case STORE_SUBSCR_LIST_INT: - return 3; - case STORE_SUBSCR_DICT: - return 3; - case DELETE_SUBSCR: + case END_SEND: return 2; - case CALL_INTRINSIC_1: + case ENTER_EXECUTOR: + return 0; + case EXIT_INIT_CHECK: return 1; - case CALL_INTRINSIC_2: + case EXTENDED_ARG: + return 0; + case FORMAT_SIMPLE: + return 1; + case FORMAT_WITH_SPEC: return 2; - case RAISE_VARARGS: - return oparg; - case INTERPRETER_EXIT: + case FOR_ITER: return 1; - case _POP_FRAME: + case FOR_ITER_GEN: return 1; - case RETURN_VALUE: + case FOR_ITER_LIST: return 1; - case INSTRUMENTED_RETURN_VALUE: + case FOR_ITER_RANGE: + return 1; + case FOR_ITER_TUPLE: return 1; - case RETURN_CONST: - return 0; - case INSTRUMENTED_RETURN_CONST: - return 0; case GET_AITER: return 1; case GET_ANEXT: return 1; case GET_AWAITABLE: return 1; - case _SPECIALIZE_SEND: - return 2; - case _SEND: - return 2; - case SEND: - return 2; - case SEND_GEN: - return 2; - case INSTRUMENTED_YIELD_VALUE: + case GET_ITER: return 1; - case YIELD_VALUE: + case GET_LEN: return 1; - case POP_EXCEPT: + case GET_YIELD_FROM_ITER: return 1; - case RERAISE: - return oparg + 1; - case END_ASYNC_FOR: + case IMPORT_FROM: + return 1; + case IMPORT_NAME: return 2; - case CLEANUP_THROW: + case INSTRUMENTED_CALL: + return 0; + case INSTRUMENTED_CALL_FUNCTION_EX: + return 0; + case INSTRUMENTED_CALL_KW: + return 0; + case INSTRUMENTED_END_FOR: + return 2; + case INSTRUMENTED_END_SEND: + return 2; + case INSTRUMENTED_FOR_ITER: + return 0; + case INSTRUMENTED_INSTRUCTION: + return 0; + case INSTRUMENTED_JUMP_BACKWARD: + return 0; + case INSTRUMENTED_JUMP_FORWARD: + return 0; + case 
INSTRUMENTED_LOAD_SUPER_ATTR: return 3; - case LOAD_ASSERTION_ERROR: + case INSTRUMENTED_POP_JUMP_IF_FALSE: return 0; - case LOAD_BUILD_CLASS: + case INSTRUMENTED_POP_JUMP_IF_NONE: return 0; - case STORE_NAME: + case INSTRUMENTED_POP_JUMP_IF_NOT_NONE: + return 0; + case INSTRUMENTED_POP_JUMP_IF_TRUE: + return 0; + case INSTRUMENTED_RESUME: + return 0; + case INSTRUMENTED_RETURN_CONST: + return 0; + case INSTRUMENTED_RETURN_VALUE: return 1; - case DELETE_NAME: + case INSTRUMENTED_YIELD_VALUE: + return 1; + case INTERPRETER_EXIT: + return 1; + case IS_OP: + return 2; + case JUMP: return 0; - case _SPECIALIZE_UNPACK_SEQUENCE: + case JUMP_BACKWARD: + return 0; + case JUMP_BACKWARD_NO_INTERRUPT: + return 0; + case JUMP_FORWARD: + return 0; + case JUMP_NO_INTERRUPT: + return 0; + case LIST_APPEND: + return (oparg-1) + 2; + case LIST_EXTEND: + return (oparg-1) + 2; + case LOAD_ASSERTION_ERROR: + return 0; + case LOAD_ATTR: return 1; - case _UNPACK_SEQUENCE: + case LOAD_ATTR_CLASS: return 1; - case UNPACK_SEQUENCE: + case LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN: return 1; - case UNPACK_SEQUENCE_TWO_TUPLE: + case LOAD_ATTR_INSTANCE_VALUE: return 1; - case UNPACK_SEQUENCE_TUPLE: + case LOAD_ATTR_METHOD_LAZY_DICT: return 1; - case UNPACK_SEQUENCE_LIST: + case LOAD_ATTR_METHOD_NO_DICT: return 1; - case UNPACK_EX: + case LOAD_ATTR_METHOD_WITH_VALUES: return 1; - case _SPECIALIZE_STORE_ATTR: + case LOAD_ATTR_MODULE: return 1; - case _STORE_ATTR: - return 2; - case STORE_ATTR: - return 2; - case DELETE_ATTR: + case LOAD_ATTR_NONDESCRIPTOR_NO_DICT: return 1; - case STORE_GLOBAL: + case LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES: return 1; - case DELETE_GLOBAL: - return 0; - case LOAD_LOCALS: - return 0; - case LOAD_FROM_DICT_OR_GLOBALS: + case LOAD_ATTR_PROPERTY: return 1; - case LOAD_NAME: + case LOAD_ATTR_SLOT: + return 1; + case LOAD_ATTR_WITH_HINT: + return 1; + case LOAD_BUILD_CLASS: return 0; - case _SPECIALIZE_LOAD_GLOBAL: + case LOAD_CLOSURE: return 0; - case _LOAD_GLOBAL: + case LOAD_CONST: return 0; - case LOAD_GLOBAL: + case LOAD_DEREF: return 0; - case _GUARD_GLOBALS_VERSION: + case LOAD_FAST: return 0; - case _GUARD_BUILTINS_VERSION: + case LOAD_FAST_AND_CLEAR: return 0; - case _LOAD_GLOBAL_MODULE: + case LOAD_FAST_CHECK: return 0; - case _LOAD_GLOBAL_BUILTINS: + case LOAD_FAST_LOAD_FAST: return 0; - case LOAD_GLOBAL_MODULE: + case LOAD_FROM_DICT_OR_DEREF: + return 1; + case LOAD_FROM_DICT_OR_GLOBALS: + return 1; + case LOAD_GLOBAL: return 0; case LOAD_GLOBAL_BUILTIN: return 0; - case DELETE_FAST: - return 0; - case MAKE_CELL: - return 0; - case DELETE_DEREF: + case LOAD_GLOBAL_MODULE: return 0; - case LOAD_FROM_DICT_OR_DEREF: - return 1; - case LOAD_DEREF: + case LOAD_LOCALS: return 0; - case STORE_DEREF: + case LOAD_METHOD: return 1; - case COPY_FREE_VARS: - return 0; - case BUILD_STRING: - return oparg; - case BUILD_TUPLE: - return oparg; - case BUILD_LIST: - return oparg; - case LIST_EXTEND: - return (oparg-1) + 2; - case SET_UPDATE: - return (oparg-1) + 2; - case BUILD_SET: - return oparg; - case BUILD_MAP: - return oparg*2; - case SETUP_ANNOTATIONS: + case LOAD_NAME: return 0; - case BUILD_CONST_KEY_MAP: - return oparg + 1; - case DICT_UPDATE: - return (oparg - 1) + 2; - case DICT_MERGE: - return (oparg - 1) + 5; - case MAP_ADD: - return (oparg - 1) + 3; - case INSTRUMENTED_LOAD_SUPER_ATTR: - return 3; - case _SPECIALIZE_LOAD_SUPER_ATTR: + case LOAD_SUPER_ATTR: return 3; - case _LOAD_SUPER_ATTR: + case LOAD_SUPER_ATTR_ATTR: return 3; - case LOAD_SUPER_ATTR: + case LOAD_SUPER_ATTR_METHOD: return 3; 
case LOAD_SUPER_METHOD: return 3; - case LOAD_ZERO_SUPER_METHOD: - return 3; case LOAD_ZERO_SUPER_ATTR: return 3; - case LOAD_SUPER_ATTR_ATTR: - return 3; - case LOAD_SUPER_ATTR_METHOD: + case LOAD_ZERO_SUPER_METHOD: return 3; - case _SPECIALIZE_LOAD_ATTR: + case MAKE_CELL: + return 0; + case MAKE_FUNCTION: return 1; - case _LOAD_ATTR: + case MAP_ADD: + return (oparg - 1) + 3; + case MATCH_CLASS: + return 3; + case MATCH_KEYS: + return 2; + case MATCH_MAPPING: return 1; - case LOAD_ATTR: + case MATCH_SEQUENCE: return 1; - case LOAD_METHOD: + case NOP: + return 0; + case POP_BLOCK: + return 0; + case POP_EXCEPT: return 1; - case _GUARD_TYPE_VERSION: + case POP_JUMP_IF_FALSE: return 1; - case _CHECK_MANAGED_OBJECT_HAS_VALUES: + case POP_JUMP_IF_NONE: return 1; - case _LOAD_ATTR_INSTANCE_VALUE: + case POP_JUMP_IF_NOT_NONE: return 1; - case LOAD_ATTR_INSTANCE_VALUE: + case POP_JUMP_IF_TRUE: return 1; - case _CHECK_ATTR_MODULE: + case POP_TOP: return 1; - case _LOAD_ATTR_MODULE: + case PUSH_EXC_INFO: return 1; - case LOAD_ATTR_MODULE: + case PUSH_NULL: + return 0; + case RAISE_VARARGS: + return oparg; + case RERAISE: + return oparg + 1; + case RESERVED: + return 0; + case RESUME: + return 0; + case RESUME_CHECK: + return 0; + case RETURN_CONST: + return 0; + case RETURN_GENERATOR: + return 0; + case RETURN_VALUE: return 1; - case _CHECK_ATTR_WITH_HINT: + case SEND: + return 2; + case SEND_GEN: + return 2; + case SETUP_ANNOTATIONS: + return 0; + case SETUP_CLEANUP: + return 0; + case SETUP_FINALLY: + return 0; + case SETUP_WITH: + return 0; + case SET_ADD: + return (oparg-1) + 2; + case SET_FUNCTION_ATTRIBUTE: + return 2; + case SET_UPDATE: + return (oparg-1) + 2; + case STORE_ATTR: + return 2; + case STORE_ATTR_INSTANCE_VALUE: + return 2; + case STORE_ATTR_SLOT: + return 2; + case STORE_ATTR_WITH_HINT: + return 2; + case STORE_DEREF: return 1; - case _LOAD_ATTR_WITH_HINT: + case STORE_FAST: return 1; - case LOAD_ATTR_WITH_HINT: + case STORE_FAST_LOAD_FAST: return 1; - case _LOAD_ATTR_SLOT: + case STORE_FAST_MAYBE_NULL: return 1; - case LOAD_ATTR_SLOT: + case STORE_FAST_STORE_FAST: + return 2; + case STORE_GLOBAL: return 1; - case _CHECK_ATTR_CLASS: + case STORE_NAME: return 1; - case _LOAD_ATTR_CLASS: + case STORE_SLICE: + return 4; + case STORE_SUBSCR: + return 3; + case STORE_SUBSCR_DICT: + return 3; + case STORE_SUBSCR_LIST_INT: + return 3; + case SWAP: + return (oparg-2) + 2; + case TO_BOOL: return 1; - case LOAD_ATTR_CLASS: + case TO_BOOL_ALWAYS_TRUE: return 1; - case LOAD_ATTR_PROPERTY: + case TO_BOOL_BOOL: return 1; - case LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN: + case TO_BOOL_INT: return 1; - case _GUARD_DORV_VALUES: + case TO_BOOL_LIST: return 1; - case _STORE_ATTR_INSTANCE_VALUE: - return 2; - case STORE_ATTR_INSTANCE_VALUE: - return 2; - case STORE_ATTR_WITH_HINT: - return 2; - case _STORE_ATTR_SLOT: - return 2; - case STORE_ATTR_SLOT: - return 2; - case _SPECIALIZE_COMPARE_OP: - return 2; - case _COMPARE_OP: + case TO_BOOL_NONE: + return 1; + case TO_BOOL_STR: + return 1; + case UNARY_INVERT: + return 1; + case UNARY_NEGATIVE: + return 1; + case UNARY_NOT: + return 1; + case UNPACK_EX: + return 1; + case UNPACK_SEQUENCE: + return 1; + case UNPACK_SEQUENCE_LIST: + return 1; + case UNPACK_SEQUENCE_TUPLE: + return 1; + case UNPACK_SEQUENCE_TWO_TUPLE: + return 1; + case WITH_EXCEPT_START: + return 4; + case YIELD_VALUE: + return 1; + case _BINARY_OP: return 2; - case COMPARE_OP: + case _BINARY_OP_ADD_FLOAT: return 2; - case COMPARE_OP_FLOAT: + case _BINARY_OP_ADD_INT: return 2; - case 
COMPARE_OP_INT: + case _BINARY_OP_ADD_UNICODE: return 2; - case COMPARE_OP_STR: + case _BINARY_OP_INPLACE_ADD_UNICODE: return 2; - case IS_OP: + case _BINARY_OP_MULTIPLY_FLOAT: return 2; - case CONTAINS_OP: + case _BINARY_OP_MULTIPLY_INT: return 2; - case CHECK_EG_MATCH: + case _BINARY_OP_SUBTRACT_FLOAT: return 2; - case CHECK_EXC_MATCH: + case _BINARY_OP_SUBTRACT_INT: return 2; - case IMPORT_NAME: + case _BINARY_SUBSCR: return 2; - case IMPORT_FROM: - return 1; - case JUMP_FORWARD: - return 0; - case JUMP_BACKWARD: - return 0; - case JUMP: - return 0; - case JUMP_NO_INTERRUPT: - return 0; - case ENTER_EXECUTOR: - return 0; - case _POP_JUMP_IF_FALSE: - return 1; - case _POP_JUMP_IF_TRUE: - return 1; - case _IS_NONE: + case _CALL: + return oparg + 2; + case _CHECK_ATTR_CLASS: return 1; - case POP_JUMP_IF_TRUE: + case _CHECK_ATTR_METHOD_LAZY_DICT: return 1; - case POP_JUMP_IF_FALSE: + case _CHECK_ATTR_MODULE: return 1; - case POP_JUMP_IF_NONE: + case _CHECK_ATTR_WITH_HINT: return 1; - case POP_JUMP_IF_NOT_NONE: + case _CHECK_CALL_BOUND_METHOD_EXACT_ARGS: + return oparg + 2; + case _CHECK_FUNCTION_EXACT_ARGS: + return oparg + 2; + case _CHECK_MANAGED_OBJECT_HAS_VALUES: return 1; - case JUMP_BACKWARD_NO_INTERRUPT: + case _CHECK_PEP_523: return 0; - case GET_LEN: - return 1; - case MATCH_CLASS: - return 3; - case MATCH_MAPPING: - return 1; - case MATCH_SEQUENCE: - return 1; - case MATCH_KEYS: + case _CHECK_STACK_SPACE: + return oparg + 2; + case _CHECK_VALIDITY: + return 0; + case _COMPARE_OP: return 2; - case GET_ITER: - return 1; - case GET_YIELD_FROM_ITER: - return 1; - case _SPECIALIZE_FOR_ITER: - return 1; + case _EXIT_TRACE: + return 0; case _FOR_ITER: return 1; case _FOR_ITER_TIER_TWO: return 1; - case FOR_ITER: + case _GUARD_BOTH_FLOAT: + return 2; + case _GUARD_BOTH_INT: + return 2; + case _GUARD_BOTH_UNICODE: + return 2; + case _GUARD_BUILTINS_VERSION: + return 0; + case _GUARD_DORV_VALUES: return 1; - case INSTRUMENTED_FOR_ITER: + case _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT: + return 1; + case _GUARD_GLOBALS_VERSION: return 0; - case _ITER_CHECK_LIST: + case _GUARD_IS_FALSE_POP: return 1; - case _ITER_JUMP_LIST: + case _GUARD_IS_NONE_POP: return 1; - case _GUARD_NOT_EXHAUSTED_LIST: + case _GUARD_IS_NOT_NONE_POP: return 1; - case _ITER_NEXT_LIST: + case _GUARD_IS_TRUE_POP: return 1; - case FOR_ITER_LIST: + case _GUARD_KEYS_VERSION: return 1; - case _ITER_CHECK_TUPLE: + case _GUARD_NOT_EXHAUSTED_LIST: return 1; - case _ITER_JUMP_TUPLE: + case _GUARD_NOT_EXHAUSTED_RANGE: return 1; case _GUARD_NOT_EXHAUSTED_TUPLE: return 1; - case _ITER_NEXT_TUPLE: + case _GUARD_TYPE_VERSION: return 1; - case FOR_ITER_TUPLE: + case _INIT_CALL_BOUND_METHOD_EXACT_ARGS: + return oparg + 2; + case _INIT_CALL_PY_EXACT_ARGS: + return oparg + 2; + case _INSERT: + return oparg + 1; + case _IS_NONE: + return 1; + case _ITER_CHECK_LIST: return 1; case _ITER_CHECK_RANGE: return 1; - case _ITER_JUMP_RANGE: + case _ITER_CHECK_TUPLE: return 1; - case _GUARD_NOT_EXHAUSTED_RANGE: + case _ITER_JUMP_LIST: return 1; - case _ITER_NEXT_RANGE: + case _ITER_JUMP_RANGE: return 1; - case FOR_ITER_RANGE: + case _ITER_JUMP_TUPLE: return 1; - case FOR_ITER_GEN: + case _ITER_NEXT_LIST: return 1; - case BEFORE_ASYNC_WITH: + case _ITER_NEXT_RANGE: return 1; - case BEFORE_WITH: + case _ITER_NEXT_TUPLE: return 1; - case WITH_EXCEPT_START: - return 4; - case SETUP_FINALLY: - return 0; - case SETUP_CLEANUP: - return 0; - case SETUP_WITH: - return 0; - case POP_BLOCK: + case _JUMP_TO_TOP: return 0; - case PUSH_EXC_INFO: - return 1; - case 
_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT: + case _LOAD_ATTR: return 1; - case _GUARD_KEYS_VERSION: + case _LOAD_ATTR_CLASS: return 1; - case _LOAD_ATTR_METHOD_WITH_VALUES: + case _LOAD_ATTR_INSTANCE_VALUE: return 1; - case LOAD_ATTR_METHOD_WITH_VALUES: + case _LOAD_ATTR_METHOD_LAZY_DICT: return 1; case _LOAD_ATTR_METHOD_NO_DICT: return 1; - case LOAD_ATTR_METHOD_NO_DICT: + case _LOAD_ATTR_METHOD_WITH_VALUES: + return 1; + case _LOAD_ATTR_MODULE: + return 1; + case _LOAD_ATTR_NONDESCRIPTOR_NO_DICT: return 1; case _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES: return 1; - case LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES: + case _LOAD_ATTR_SLOT: return 1; - case _LOAD_ATTR_NONDESCRIPTOR_NO_DICT: + case _LOAD_ATTR_WITH_HINT: return 1; - case LOAD_ATTR_NONDESCRIPTOR_NO_DICT: + case _LOAD_GLOBAL: + return 0; + case _LOAD_GLOBAL_BUILTINS: + return 0; + case _LOAD_GLOBAL_MODULE: + return 0; + case _LOAD_SUPER_ATTR: + return 3; + case _POP_FRAME: return 1; - case _CHECK_ATTR_METHOD_LAZY_DICT: + case _POP_JUMP_IF_FALSE: return 1; - case _LOAD_ATTR_METHOD_LAZY_DICT: + case _POP_JUMP_IF_TRUE: return 1; - case LOAD_ATTR_METHOD_LAZY_DICT: + case _PUSH_FRAME: return 1; - case INSTRUMENTED_CALL: + case _SAVE_RETURN_OFFSET: return 0; + case _SEND: + return 2; + case _SET_IP: + return 0; + case _SPECIALIZE_BINARY_OP: + return 2; + case _SPECIALIZE_BINARY_SUBSCR: + return 2; case _SPECIALIZE_CALL: return oparg + 2; - case _CALL: - return oparg + 2; - case CALL: - return oparg + 2; - case _CHECK_CALL_BOUND_METHOD_EXACT_ARGS: - return oparg + 2; - case _INIT_CALL_BOUND_METHOD_EXACT_ARGS: - return oparg + 2; - case _CHECK_PEP_523: - return 0; - case _CHECK_FUNCTION_EXACT_ARGS: - return oparg + 2; - case _CHECK_STACK_SPACE: - return oparg + 2; - case _INIT_CALL_PY_EXACT_ARGS: - return oparg + 2; - case _PUSH_FRAME: + case _SPECIALIZE_COMPARE_OP: + return 2; + case _SPECIALIZE_FOR_ITER: return 1; - case CALL_BOUND_METHOD_EXACT_ARGS: - return oparg + 2; - case CALL_PY_EXACT_ARGS: - return oparg + 2; - case CALL_PY_WITH_DEFAULTS: - return oparg + 2; - case CALL_TYPE_1: - return oparg + 2; - case CALL_STR_1: - return oparg + 2; - case CALL_TUPLE_1: - return oparg + 2; - case CALL_ALLOC_AND_ENTER_INIT: - return oparg + 2; - case EXIT_INIT_CHECK: + case _SPECIALIZE_LOAD_ATTR: return 1; - case CALL_BUILTIN_CLASS: - return oparg + 2; - case CALL_BUILTIN_O: - return oparg + 2; - case CALL_BUILTIN_FAST: - return oparg + 2; - case CALL_BUILTIN_FAST_WITH_KEYWORDS: - return oparg + 2; - case CALL_LEN: - return oparg + 2; - case CALL_ISINSTANCE: - return oparg + 2; - case CALL_LIST_APPEND: - return oparg + 2; - case CALL_METHOD_DESCRIPTOR_O: - return oparg + 2; - case CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS: - return oparg + 2; - case CALL_METHOD_DESCRIPTOR_NOARGS: - return oparg + 2; - case CALL_METHOD_DESCRIPTOR_FAST: - return oparg + 2; - case INSTRUMENTED_CALL_KW: - return 0; - case CALL_KW: - return oparg + 3; - case INSTRUMENTED_CALL_FUNCTION_EX: + case _SPECIALIZE_LOAD_GLOBAL: return 0; - case CALL_FUNCTION_EX: - return ((oparg & 1) ? 1 : 0) + 3; - case MAKE_FUNCTION: + case _SPECIALIZE_LOAD_SUPER_ATTR: + return 3; + case _SPECIALIZE_SEND: + return 2; + case _SPECIALIZE_STORE_ATTR: return 1; - case SET_FUNCTION_ATTRIBUTE: + case _SPECIALIZE_STORE_SUBSCR: return 2; - case RETURN_GENERATOR: - return 0; - case BUILD_SLICE: - return ((oparg == 3) ? 
1 : 0) + 2; - case CONVERT_VALUE: + case _SPECIALIZE_TO_BOOL: return 1; - case FORMAT_SIMPLE: + case _SPECIALIZE_UNPACK_SEQUENCE: return 1; - case FORMAT_WITH_SPEC: - return 2; - case COPY: - return (oparg-1) + 1; - case _SPECIALIZE_BINARY_OP: + case _STORE_ATTR: return 2; - case _BINARY_OP: + case _STORE_ATTR_INSTANCE_VALUE: return 2; - case BINARY_OP: + case _STORE_ATTR_SLOT: return 2; - case SWAP: - return (oparg-2) + 2; - case INSTRUMENTED_INSTRUCTION: - return 0; - case INSTRUMENTED_JUMP_FORWARD: - return 0; - case INSTRUMENTED_JUMP_BACKWARD: - return 0; - case INSTRUMENTED_POP_JUMP_IF_TRUE: - return 0; - case INSTRUMENTED_POP_JUMP_IF_FALSE: - return 0; - case INSTRUMENTED_POP_JUMP_IF_NONE: - return 0; - case INSTRUMENTED_POP_JUMP_IF_NOT_NONE: - return 0; - case EXTENDED_ARG: - return 0; - case CACHE: - return 0; - case RESERVED: - return 0; - case _GUARD_IS_TRUE_POP: - return 1; - case _GUARD_IS_FALSE_POP: - return 1; - case _GUARD_IS_NONE_POP: + case _STORE_SUBSCR: + return 3; + case _TO_BOOL: return 1; - case _GUARD_IS_NOT_NONE_POP: + case _UNPACK_SEQUENCE: return 1; - case _JUMP_TO_TOP: - return 0; - case _SET_IP: - return 0; - case _SAVE_RETURN_OFFSET: - return 0; - case _EXIT_TRACE: - return 0; - case _INSERT: - return oparg + 1; - case _CHECK_VALIDITY: - return 0; default: return -1; } @@ -671,636 +671,636 @@ extern int _PyOpcode_num_pushed(int opcode, int oparg, bool jump); #ifdef NEED_OPCODE_METADATA int _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { switch(opcode) { - case NOP: - return 0; - case RESUME: - return 0; - case RESUME_CHECK: - return 0; - case INSTRUMENTED_RESUME: - return 0; - case LOAD_CLOSURE: - return 1; - case LOAD_FAST_CHECK: + case BEFORE_ASYNC_WITH: + return 2; + case BEFORE_WITH: + return 2; + case BINARY_OP: return 1; - case LOAD_FAST: + case BINARY_OP_ADD_FLOAT: return 1; - case LOAD_FAST_AND_CLEAR: + case BINARY_OP_ADD_INT: return 1; - case LOAD_FAST_LOAD_FAST: - return 2; - case LOAD_CONST: + case BINARY_OP_ADD_UNICODE: return 1; - case STORE_FAST: - return 0; - case STORE_FAST_MAYBE_NULL: + case BINARY_OP_INPLACE_ADD_UNICODE: return 0; - case STORE_FAST_LOAD_FAST: + case BINARY_OP_MULTIPLY_FLOAT: return 1; - case STORE_FAST_STORE_FAST: - return 0; - case POP_TOP: - return 0; - case PUSH_NULL: + case BINARY_OP_MULTIPLY_INT: return 1; - case END_FOR: - return 0; - case INSTRUMENTED_END_FOR: - return 0; - case END_SEND: + case BINARY_OP_SUBTRACT_FLOAT: return 1; - case INSTRUMENTED_END_SEND: + case BINARY_OP_SUBTRACT_INT: return 1; - case UNARY_NEGATIVE: + case BINARY_SLICE: return 1; - case UNARY_NOT: + case BINARY_SUBSCR: return 1; - case _SPECIALIZE_TO_BOOL: + case BINARY_SUBSCR_DICT: return 1; - case _TO_BOOL: + case BINARY_SUBSCR_GETITEM: return 1; - case TO_BOOL: + case BINARY_SUBSCR_LIST_INT: return 1; - case TO_BOOL_BOOL: + case BINARY_SUBSCR_STR_INT: return 1; - case TO_BOOL_INT: + case BINARY_SUBSCR_TUPLE_INT: return 1; - case TO_BOOL_LIST: + case BUILD_CONST_KEY_MAP: return 1; - case TO_BOOL_NONE: + case BUILD_LIST: return 1; - case TO_BOOL_STR: + case BUILD_MAP: return 1; - case TO_BOOL_ALWAYS_TRUE: + case BUILD_SET: return 1; - case UNARY_INVERT: + case BUILD_SLICE: return 1; - case _GUARD_BOTH_INT: - return 2; - case _BINARY_OP_MULTIPLY_INT: + case BUILD_STRING: return 1; - case _BINARY_OP_ADD_INT: + case BUILD_TUPLE: return 1; - case _BINARY_OP_SUBTRACT_INT: + case CACHE: + return 0; + case CALL: return 1; - case BINARY_OP_MULTIPLY_INT: + case CALL_ALLOC_AND_ENTER_INIT: return 1; - case BINARY_OP_ADD_INT: + case 
CALL_BOUND_METHOD_EXACT_ARGS: + return 0; + case CALL_BUILTIN_CLASS: return 1; - case BINARY_OP_SUBTRACT_INT: + case CALL_BUILTIN_FAST: return 1; - case _GUARD_BOTH_FLOAT: - return 2; - case _BINARY_OP_MULTIPLY_FLOAT: + case CALL_BUILTIN_FAST_WITH_KEYWORDS: return 1; - case _BINARY_OP_ADD_FLOAT: + case CALL_BUILTIN_O: return 1; - case _BINARY_OP_SUBTRACT_FLOAT: + case CALL_FUNCTION_EX: return 1; - case BINARY_OP_MULTIPLY_FLOAT: + case CALL_INTRINSIC_1: return 1; - case BINARY_OP_ADD_FLOAT: + case CALL_INTRINSIC_2: return 1; - case BINARY_OP_SUBTRACT_FLOAT: + case CALL_ISINSTANCE: return 1; - case _GUARD_BOTH_UNICODE: - return 2; - case _BINARY_OP_ADD_UNICODE: + case CALL_KW: return 1; - case BINARY_OP_ADD_UNICODE: + case CALL_LEN: return 1; - case _BINARY_OP_INPLACE_ADD_UNICODE: - return 0; - case BINARY_OP_INPLACE_ADD_UNICODE: - return 0; - case _SPECIALIZE_BINARY_SUBSCR: - return 2; - case _BINARY_SUBSCR: + case CALL_LIST_APPEND: return 1; - case BINARY_SUBSCR: + case CALL_METHOD_DESCRIPTOR_FAST: return 1; - case BINARY_SLICE: + case CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS: return 1; - case STORE_SLICE: - return 0; - case BINARY_SUBSCR_LIST_INT: + case CALL_METHOD_DESCRIPTOR_NOARGS: return 1; - case BINARY_SUBSCR_STR_INT: + case CALL_METHOD_DESCRIPTOR_O: return 1; - case BINARY_SUBSCR_TUPLE_INT: + case CALL_PY_EXACT_ARGS: + return 0; + case CALL_PY_WITH_DEFAULTS: return 1; - case BINARY_SUBSCR_DICT: + case CALL_STR_1: return 1; - case BINARY_SUBSCR_GETITEM: + case CALL_TUPLE_1: return 1; - case LIST_APPEND: - return (oparg-1) + 1; - case SET_ADD: - return (oparg-1) + 1; - case _SPECIALIZE_STORE_SUBSCR: + case CALL_TYPE_1: + return 1; + case CHECK_EG_MATCH: return 2; - case _STORE_SUBSCR: + case CHECK_EXC_MATCH: + return 2; + case CLEANUP_THROW: + return 2; + case COMPARE_OP: + return 1; + case COMPARE_OP_FLOAT: + return 1; + case COMPARE_OP_INT: + return 1; + case COMPARE_OP_STR: + return 1; + case CONTAINS_OP: + return 1; + case CONVERT_VALUE: + return 1; + case COPY: + return (oparg-1) + 2; + case COPY_FREE_VARS: return 0; - case STORE_SUBSCR: + case DELETE_ATTR: return 0; - case STORE_SUBSCR_LIST_INT: + case DELETE_DEREF: return 0; - case STORE_SUBSCR_DICT: + case DELETE_FAST: return 0; - case DELETE_SUBSCR: + case DELETE_GLOBAL: return 0; - case CALL_INTRINSIC_1: - return 1; - case CALL_INTRINSIC_2: - return 1; - case RAISE_VARARGS: + case DELETE_NAME: return 0; - case INTERPRETER_EXIT: + case DELETE_SUBSCR: return 0; - case _POP_FRAME: + case DICT_MERGE: + return (oparg - 1) + 4; + case DICT_UPDATE: + return (oparg - 1) + 1; + case END_ASYNC_FOR: return 0; - case RETURN_VALUE: + case END_FOR: return 0; - case INSTRUMENTED_RETURN_VALUE: + case END_SEND: + return 1; + case ENTER_EXECUTOR: return 0; - case RETURN_CONST: + case EXIT_INIT_CHECK: return 0; - case INSTRUMENTED_RETURN_CONST: + case EXTENDED_ARG: return 0; - case GET_AITER: + case FORMAT_SIMPLE: return 1; - case GET_ANEXT: - return 2; - case GET_AWAITABLE: + case FORMAT_WITH_SPEC: return 1; - case _SPECIALIZE_SEND: + case FOR_ITER: return 2; - case _SEND: + case FOR_ITER_GEN: return 2; - case SEND: + case FOR_ITER_LIST: return 2; - case SEND_GEN: + case FOR_ITER_RANGE: return 2; - case INSTRUMENTED_YIELD_VALUE: + case FOR_ITER_TUPLE: + return 2; + case GET_AITER: return 1; - case YIELD_VALUE: + case GET_ANEXT: + return 2; + case GET_AWAITABLE: return 1; - case POP_EXCEPT: - return 0; - case RERAISE: - return oparg; - case END_ASYNC_FOR: - return 0; - case CLEANUP_THROW: + case GET_ITER: + return 1; + case GET_LEN: return 
2; - case LOAD_ASSERTION_ERROR: + case GET_YIELD_FROM_ITER: return 1; - case LOAD_BUILD_CLASS: + case IMPORT_FROM: + return 2; + case IMPORT_NAME: return 1; - case STORE_NAME: + case INSTRUMENTED_CALL: return 0; - case DELETE_NAME: + case INSTRUMENTED_CALL_FUNCTION_EX: return 0; - case _SPECIALIZE_UNPACK_SEQUENCE: - return 1; - case _UNPACK_SEQUENCE: - return oparg; - case UNPACK_SEQUENCE: - return oparg; - case UNPACK_SEQUENCE_TWO_TUPLE: - return oparg; - case UNPACK_SEQUENCE_TUPLE: - return oparg; - case UNPACK_SEQUENCE_LIST: - return oparg; - case UNPACK_EX: - return (oparg & 0xFF) + (oparg >> 8) + 1; - case _SPECIALIZE_STORE_ATTR: - return 1; - case _STORE_ATTR: + case INSTRUMENTED_CALL_KW: return 0; - case STORE_ATTR: + case INSTRUMENTED_END_FOR: return 0; - case DELETE_ATTR: + case INSTRUMENTED_END_SEND: + return 1; + case INSTRUMENTED_FOR_ITER: return 0; - case STORE_GLOBAL: + case INSTRUMENTED_INSTRUCTION: return 0; - case DELETE_GLOBAL: + case INSTRUMENTED_JUMP_BACKWARD: return 0; - case LOAD_LOCALS: - return 1; - case LOAD_FROM_DICT_OR_GLOBALS: - return 1; - case LOAD_NAME: - return 1; - case _SPECIALIZE_LOAD_GLOBAL: + case INSTRUMENTED_JUMP_FORWARD: return 0; - case _LOAD_GLOBAL: + case INSTRUMENTED_LOAD_SUPER_ATTR: return ((oparg & 1) ? 1 : 0) + 1; - case LOAD_GLOBAL: - return (oparg & 1 ? 1 : 0) + 1; - case _GUARD_GLOBALS_VERSION: + case INSTRUMENTED_POP_JUMP_IF_FALSE: return 0; - case _GUARD_BUILTINS_VERSION: + case INSTRUMENTED_POP_JUMP_IF_NONE: return 0; - case _LOAD_GLOBAL_MODULE: - return ((oparg & 1) ? 1 : 0) + 1; - case _LOAD_GLOBAL_BUILTINS: - return ((oparg & 1) ? 1 : 0) + 1; - case LOAD_GLOBAL_MODULE: - return (oparg & 1 ? 1 : 0) + 1; - case LOAD_GLOBAL_BUILTIN: - return (oparg & 1 ? 1 : 0) + 1; - case DELETE_FAST: + case INSTRUMENTED_POP_JUMP_IF_NOT_NONE: return 0; - case MAKE_CELL: + case INSTRUMENTED_POP_JUMP_IF_TRUE: return 0; - case DELETE_DEREF: + case INSTRUMENTED_RESUME: return 0; - case LOAD_FROM_DICT_OR_DEREF: - return 1; - case LOAD_DEREF: - return 1; - case STORE_DEREF: + case INSTRUMENTED_RETURN_CONST: return 0; - case COPY_FREE_VARS: + case INSTRUMENTED_RETURN_VALUE: return 0; - case BUILD_STRING: - return 1; - case BUILD_TUPLE: + case INSTRUMENTED_YIELD_VALUE: return 1; - case BUILD_LIST: + case INTERPRETER_EXIT: + return 0; + case IS_OP: return 1; - case LIST_EXTEND: + case JUMP: + return 0; + case JUMP_BACKWARD: + return 0; + case JUMP_BACKWARD_NO_INTERRUPT: + return 0; + case JUMP_FORWARD: + return 0; + case JUMP_NO_INTERRUPT: + return 0; + case LIST_APPEND: return (oparg-1) + 1; - case SET_UPDATE: + case LIST_EXTEND: return (oparg-1) + 1; - case BUILD_SET: - return 1; - case BUILD_MAP: - return 1; - case SETUP_ANNOTATIONS: - return 0; - case BUILD_CONST_KEY_MAP: + case LOAD_ASSERTION_ERROR: return 1; - case DICT_UPDATE: - return (oparg - 1) + 1; - case DICT_MERGE: - return (oparg - 1) + 4; - case MAP_ADD: - return (oparg - 1) + 1; - case INSTRUMENTED_LOAD_SUPER_ATTR: - return ((oparg & 1) ? 1 : 0) + 1; - case _SPECIALIZE_LOAD_SUPER_ATTR: - return 3; - case _LOAD_SUPER_ATTR: - return ((oparg & 1) ? 1 : 0) + 1; - case LOAD_SUPER_ATTR: + case LOAD_ATTR: return (oparg & 1 ? 1 : 0) + 1; - case LOAD_SUPER_METHOD: + case LOAD_ATTR_CLASS: return (oparg & 1 ? 1 : 0) + 1; - case LOAD_ZERO_SUPER_METHOD: + case LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN: + return 1; + case LOAD_ATTR_INSTANCE_VALUE: return (oparg & 1 ? 
1 : 0) + 1; - case LOAD_ZERO_SUPER_ATTR: + case LOAD_ATTR_METHOD_LAZY_DICT: + return 2; + case LOAD_ATTR_METHOD_NO_DICT: + return 2; + case LOAD_ATTR_METHOD_WITH_VALUES: + return 2; + case LOAD_ATTR_MODULE: return (oparg & 1 ? 1 : 0) + 1; - case LOAD_SUPER_ATTR_ATTR: + case LOAD_ATTR_NONDESCRIPTOR_NO_DICT: return 1; - case LOAD_SUPER_ATTR_METHOD: - return 2; - case _SPECIALIZE_LOAD_ATTR: + case LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES: return 1; - case _LOAD_ATTR: - return ((oparg & 1) ? 1 : 0) + 1; - case LOAD_ATTR: + case LOAD_ATTR_PROPERTY: + return 1; + case LOAD_ATTR_SLOT: return (oparg & 1 ? 1 : 0) + 1; - case LOAD_METHOD: + case LOAD_ATTR_WITH_HINT: return (oparg & 1 ? 1 : 0) + 1; - case _GUARD_TYPE_VERSION: + case LOAD_BUILD_CLASS: return 1; - case _CHECK_MANAGED_OBJECT_HAS_VALUES: + case LOAD_CLOSURE: return 1; - case _LOAD_ATTR_INSTANCE_VALUE: - return ((oparg & 1) ? 1 : 0) + 1; - case LOAD_ATTR_INSTANCE_VALUE: - return (oparg & 1 ? 1 : 0) + 1; - case _CHECK_ATTR_MODULE: + case LOAD_CONST: return 1; - case _LOAD_ATTR_MODULE: - return ((oparg & 1) ? 1 : 0) + 1; - case LOAD_ATTR_MODULE: - return (oparg & 1 ? 1 : 0) + 1; - case _CHECK_ATTR_WITH_HINT: - return 1; - case _LOAD_ATTR_WITH_HINT: - return ((oparg & 1) ? 1 : 0) + 1; - case LOAD_ATTR_WITH_HINT: - return (oparg & 1 ? 1 : 0) + 1; - case _LOAD_ATTR_SLOT: - return ((oparg & 1) ? 1 : 0) + 1; - case LOAD_ATTR_SLOT: - return (oparg & 1 ? 1 : 0) + 1; - case _CHECK_ATTR_CLASS: + case LOAD_DEREF: return 1; - case _LOAD_ATTR_CLASS: - return ((oparg & 1) ? 1 : 0) + 1; - case LOAD_ATTR_CLASS: - return (oparg & 1 ? 1 : 0) + 1; - case LOAD_ATTR_PROPERTY: + case LOAD_FAST: return 1; - case LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN: + case LOAD_FAST_AND_CLEAR: return 1; - case _GUARD_DORV_VALUES: + case LOAD_FAST_CHECK: return 1; - case _STORE_ATTR_INSTANCE_VALUE: - return 0; - case STORE_ATTR_INSTANCE_VALUE: - return 0; - case STORE_ATTR_WITH_HINT: - return 0; - case _STORE_ATTR_SLOT: - return 0; - case STORE_ATTR_SLOT: - return 0; - case _SPECIALIZE_COMPARE_OP: + case LOAD_FAST_LOAD_FAST: return 2; - case _COMPARE_OP: + case LOAD_FROM_DICT_OR_DEREF: return 1; - case COMPARE_OP: + case LOAD_FROM_DICT_OR_GLOBALS: return 1; - case COMPARE_OP_FLOAT: + case LOAD_GLOBAL: + return (oparg & 1 ? 1 : 0) + 1; + case LOAD_GLOBAL_BUILTIN: + return (oparg & 1 ? 1 : 0) + 1; + case LOAD_GLOBAL_MODULE: + return (oparg & 1 ? 1 : 0) + 1; + case LOAD_LOCALS: return 1; - case COMPARE_OP_INT: + case LOAD_METHOD: + return (oparg & 1 ? 1 : 0) + 1; + case LOAD_NAME: return 1; - case COMPARE_OP_STR: + case LOAD_SUPER_ATTR: + return (oparg & 1 ? 1 : 0) + 1; + case LOAD_SUPER_ATTR_ATTR: return 1; - case IS_OP: + case LOAD_SUPER_ATTR_METHOD: + return 2; + case LOAD_SUPER_METHOD: + return (oparg & 1 ? 1 : 0) + 1; + case LOAD_ZERO_SUPER_ATTR: + return (oparg & 1 ? 1 : 0) + 1; + case LOAD_ZERO_SUPER_METHOD: + return (oparg & 1 ? 
1 : 0) + 1; + case MAKE_CELL: + return 0; + case MAKE_FUNCTION: return 1; - case CONTAINS_OP: + case MAP_ADD: + return (oparg - 1) + 1; + case MATCH_CLASS: return 1; - case CHECK_EG_MATCH: - return 2; - case CHECK_EXC_MATCH: + case MATCH_KEYS: + return 3; + case MATCH_MAPPING: return 2; - case IMPORT_NAME: - return 1; - case IMPORT_FROM: + case MATCH_SEQUENCE: return 2; - case JUMP_FORWARD: + case NOP: return 0; - case JUMP_BACKWARD: + case POP_BLOCK: return 0; - case JUMP: + case POP_EXCEPT: return 0; - case JUMP_NO_INTERRUPT: + case POP_JUMP_IF_FALSE: return 0; - case ENTER_EXECUTOR: + case POP_JUMP_IF_NONE: return 0; - case _POP_JUMP_IF_FALSE: + case POP_JUMP_IF_NOT_NONE: return 0; - case _POP_JUMP_IF_TRUE: + case POP_JUMP_IF_TRUE: return 0; - case _IS_NONE: + case POP_TOP: + return 0; + case PUSH_EXC_INFO: + return 2; + case PUSH_NULL: return 1; - case POP_JUMP_IF_TRUE: + case RAISE_VARARGS: return 0; - case POP_JUMP_IF_FALSE: + case RERAISE: + return oparg; + case RESERVED: return 0; - case POP_JUMP_IF_NONE: + case RESUME: return 0; - case POP_JUMP_IF_NOT_NONE: + case RESUME_CHECK: return 0; - case JUMP_BACKWARD_NO_INTERRUPT: + case RETURN_CONST: return 0; - case GET_LEN: - return 2; - case MATCH_CLASS: - return 1; - case MATCH_MAPPING: + case RETURN_GENERATOR: + return 0; + case RETURN_VALUE: + return 0; + case SEND: return 2; - case MATCH_SEQUENCE: + case SEND_GEN: return 2; - case MATCH_KEYS: - return 3; - case GET_ITER: - return 1; - case GET_YIELD_FROM_ITER: + case SETUP_ANNOTATIONS: + return 0; + case SETUP_CLEANUP: + return 0; + case SETUP_FINALLY: + return 0; + case SETUP_WITH: + return 0; + case SET_ADD: + return (oparg-1) + 1; + case SET_FUNCTION_ATTRIBUTE: return 1; - case _SPECIALIZE_FOR_ITER: + case SET_UPDATE: + return (oparg-1) + 1; + case STORE_ATTR: + return 0; + case STORE_ATTR_INSTANCE_VALUE: + return 0; + case STORE_ATTR_SLOT: + return 0; + case STORE_ATTR_WITH_HINT: + return 0; + case STORE_DEREF: + return 0; + case STORE_FAST: + return 0; + case STORE_FAST_LOAD_FAST: return 1; - case _FOR_ITER: - return 2; - case _FOR_ITER_TIER_TWO: - return 2; - case FOR_ITER: - return 2; - case INSTRUMENTED_FOR_ITER: + case STORE_FAST_MAYBE_NULL: return 0; - case _ITER_CHECK_LIST: + case STORE_FAST_STORE_FAST: + return 0; + case STORE_GLOBAL: + return 0; + case STORE_NAME: + return 0; + case STORE_SLICE: + return 0; + case STORE_SUBSCR: + return 0; + case STORE_SUBSCR_DICT: + return 0; + case STORE_SUBSCR_LIST_INT: + return 0; + case SWAP: + return (oparg-2) + 2; + case TO_BOOL: return 1; - case _ITER_JUMP_LIST: + case TO_BOOL_ALWAYS_TRUE: return 1; - case _GUARD_NOT_EXHAUSTED_LIST: + case TO_BOOL_BOOL: return 1; - case _ITER_NEXT_LIST: - return 2; - case FOR_ITER_LIST: - return 2; - case _ITER_CHECK_TUPLE: + case TO_BOOL_INT: return 1; - case _ITER_JUMP_TUPLE: + case TO_BOOL_LIST: return 1; - case _GUARD_NOT_EXHAUSTED_TUPLE: + case TO_BOOL_NONE: return 1; - case _ITER_NEXT_TUPLE: - return 2; - case FOR_ITER_TUPLE: - return 2; - case _ITER_CHECK_RANGE: + case TO_BOOL_STR: return 1; - case _ITER_JUMP_RANGE: + case UNARY_INVERT: return 1; - case _GUARD_NOT_EXHAUSTED_RANGE: + case UNARY_NEGATIVE: return 1; - case _ITER_NEXT_RANGE: - return 2; - case FOR_ITER_RANGE: - return 2; - case FOR_ITER_GEN: - return 2; - case BEFORE_ASYNC_WITH: - return 2; - case BEFORE_WITH: - return 2; + case UNARY_NOT: + return 1; + case UNPACK_EX: + return (oparg & 0xFF) + (oparg >> 8) + 1; + case UNPACK_SEQUENCE: + return oparg; + case UNPACK_SEQUENCE_LIST: + return oparg; + case UNPACK_SEQUENCE_TUPLE: 
+ return oparg; + case UNPACK_SEQUENCE_TWO_TUPLE: + return oparg; case WITH_EXCEPT_START: return 5; - case SETUP_FINALLY: - return 0; - case SETUP_CLEANUP: - return 0; - case SETUP_WITH: - return 0; - case POP_BLOCK: - return 0; - case PUSH_EXC_INFO: - return 2; - case _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT: + case YIELD_VALUE: return 1; - case _GUARD_KEYS_VERSION: + case _BINARY_OP: return 1; - case _LOAD_ATTR_METHOD_WITH_VALUES: - return 2; - case LOAD_ATTR_METHOD_WITH_VALUES: - return 2; - case _LOAD_ATTR_METHOD_NO_DICT: - return 2; - case LOAD_ATTR_METHOD_NO_DICT: - return 2; - case _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES: + case _BINARY_OP_ADD_FLOAT: return 1; - case LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES: + case _BINARY_OP_ADD_INT: return 1; - case _LOAD_ATTR_NONDESCRIPTOR_NO_DICT: + case _BINARY_OP_ADD_UNICODE: return 1; - case LOAD_ATTR_NONDESCRIPTOR_NO_DICT: + case _BINARY_OP_INPLACE_ADD_UNICODE: + return 0; + case _BINARY_OP_MULTIPLY_FLOAT: return 1; - case _CHECK_ATTR_METHOD_LAZY_DICT: + case _BINARY_OP_MULTIPLY_INT: + return 1; + case _BINARY_OP_SUBTRACT_FLOAT: + return 1; + case _BINARY_OP_SUBTRACT_INT: + return 1; + case _BINARY_SUBSCR: return 1; - case _LOAD_ATTR_METHOD_LAZY_DICT: - return 2; - case LOAD_ATTR_METHOD_LAZY_DICT: - return 2; - case INSTRUMENTED_CALL: - return 0; - case _SPECIALIZE_CALL: - return oparg + 2; case _CALL: return 1; - case CALL: + case _CHECK_ATTR_CLASS: + return 1; + case _CHECK_ATTR_METHOD_LAZY_DICT: + return 1; + case _CHECK_ATTR_MODULE: + return 1; + case _CHECK_ATTR_WITH_HINT: return 1; case _CHECK_CALL_BOUND_METHOD_EXACT_ARGS: return oparg + 2; - case _INIT_CALL_BOUND_METHOD_EXACT_ARGS: + case _CHECK_FUNCTION_EXACT_ARGS: return oparg + 2; + case _CHECK_MANAGED_OBJECT_HAS_VALUES: + return 1; case _CHECK_PEP_523: return 0; - case _CHECK_FUNCTION_EXACT_ARGS: - return oparg + 2; case _CHECK_STACK_SPACE: return oparg + 2; - case _INIT_CALL_PY_EXACT_ARGS: - return 1; - case _PUSH_FRAME: + case _CHECK_VALIDITY: return 0; - case CALL_BOUND_METHOD_EXACT_ARGS: + case _COMPARE_OP: + return 1; + case _EXIT_TRACE: return 0; - case CALL_PY_EXACT_ARGS: + case _FOR_ITER: + return 2; + case _FOR_ITER_TIER_TWO: + return 2; + case _GUARD_BOTH_FLOAT: + return 2; + case _GUARD_BOTH_INT: + return 2; + case _GUARD_BOTH_UNICODE: + return 2; + case _GUARD_BUILTINS_VERSION: return 0; - case CALL_PY_WITH_DEFAULTS: - return 1; - case CALL_TYPE_1: - return 1; - case CALL_STR_1: - return 1; - case CALL_TUPLE_1: + case _GUARD_DORV_VALUES: return 1; - case CALL_ALLOC_AND_ENTER_INIT: + case _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT: return 1; - case EXIT_INIT_CHECK: + case _GUARD_GLOBALS_VERSION: return 0; - case CALL_BUILTIN_CLASS: - return 1; - case CALL_BUILTIN_O: - return 1; - case CALL_BUILTIN_FAST: + case _GUARD_IS_FALSE_POP: + return 0; + case _GUARD_IS_NONE_POP: + return 0; + case _GUARD_IS_NOT_NONE_POP: + return 0; + case _GUARD_IS_TRUE_POP: + return 0; + case _GUARD_KEYS_VERSION: return 1; - case CALL_BUILTIN_FAST_WITH_KEYWORDS: + case _GUARD_NOT_EXHAUSTED_LIST: return 1; - case CALL_LEN: + case _GUARD_NOT_EXHAUSTED_RANGE: return 1; - case CALL_ISINSTANCE: + case _GUARD_NOT_EXHAUSTED_TUPLE: return 1; - case CALL_LIST_APPEND: + case _GUARD_TYPE_VERSION: return 1; - case CALL_METHOD_DESCRIPTOR_O: + case _INIT_CALL_BOUND_METHOD_EXACT_ARGS: + return oparg + 2; + case _INIT_CALL_PY_EXACT_ARGS: return 1; - case CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS: + case _INSERT: + return oparg + 1; + case _IS_NONE: return 1; - case CALL_METHOD_DESCRIPTOR_NOARGS: + case _ITER_CHECK_LIST: 
return 1; - case CALL_METHOD_DESCRIPTOR_FAST: + case _ITER_CHECK_RANGE: return 1; - case INSTRUMENTED_CALL_KW: - return 0; - case CALL_KW: + case _ITER_CHECK_TUPLE: return 1; - case INSTRUMENTED_CALL_FUNCTION_EX: - return 0; - case CALL_FUNCTION_EX: + case _ITER_JUMP_LIST: return 1; - case MAKE_FUNCTION: + case _ITER_JUMP_RANGE: return 1; - case SET_FUNCTION_ATTRIBUTE: + case _ITER_JUMP_TUPLE: return 1; - case RETURN_GENERATOR: + case _ITER_NEXT_LIST: + return 2; + case _ITER_NEXT_RANGE: + return 2; + case _ITER_NEXT_TUPLE: + return 2; + case _JUMP_TO_TOP: return 0; - case BUILD_SLICE: - return 1; - case CONVERT_VALUE: - return 1; - case FORMAT_SIMPLE: - return 1; - case FORMAT_WITH_SPEC: - return 1; - case COPY: - return (oparg-1) + 2; - case _SPECIALIZE_BINARY_OP: + case _LOAD_ATTR: + return ((oparg & 1) ? 1 : 0) + 1; + case _LOAD_ATTR_CLASS: + return ((oparg & 1) ? 1 : 0) + 1; + case _LOAD_ATTR_INSTANCE_VALUE: + return ((oparg & 1) ? 1 : 0) + 1; + case _LOAD_ATTR_METHOD_LAZY_DICT: return 2; - case _BINARY_OP: + case _LOAD_ATTR_METHOD_NO_DICT: + return 2; + case _LOAD_ATTR_METHOD_WITH_VALUES: + return 2; + case _LOAD_ATTR_MODULE: + return ((oparg & 1) ? 1 : 0) + 1; + case _LOAD_ATTR_NONDESCRIPTOR_NO_DICT: return 1; - case BINARY_OP: + case _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES: return 1; - case SWAP: - return (oparg-2) + 2; - case INSTRUMENTED_INSTRUCTION: - return 0; - case INSTRUMENTED_JUMP_FORWARD: - return 0; - case INSTRUMENTED_JUMP_BACKWARD: - return 0; - case INSTRUMENTED_POP_JUMP_IF_TRUE: - return 0; - case INSTRUMENTED_POP_JUMP_IF_FALSE: - return 0; - case INSTRUMENTED_POP_JUMP_IF_NONE: - return 0; - case INSTRUMENTED_POP_JUMP_IF_NOT_NONE: - return 0; - case EXTENDED_ARG: - return 0; - case CACHE: + case _LOAD_ATTR_SLOT: + return ((oparg & 1) ? 1 : 0) + 1; + case _LOAD_ATTR_WITH_HINT: + return ((oparg & 1) ? 1 : 0) + 1; + case _LOAD_GLOBAL: + return ((oparg & 1) ? 1 : 0) + 1; + case _LOAD_GLOBAL_BUILTINS: + return ((oparg & 1) ? 1 : 0) + 1; + case _LOAD_GLOBAL_MODULE: + return ((oparg & 1) ? 1 : 0) + 1; + case _LOAD_SUPER_ATTR: + return ((oparg & 1) ? 
1 : 0) + 1; + case _POP_FRAME: return 0; - case RESERVED: + case _POP_JUMP_IF_FALSE: return 0; - case _GUARD_IS_TRUE_POP: + case _POP_JUMP_IF_TRUE: return 0; - case _GUARD_IS_FALSE_POP: + case _PUSH_FRAME: return 0; - case _GUARD_IS_NONE_POP: + case _SAVE_RETURN_OFFSET: return 0; - case _GUARD_IS_NOT_NONE_POP: + case _SEND: + return 2; + case _SET_IP: return 0; - case _JUMP_TO_TOP: + case _SPECIALIZE_BINARY_OP: + return 2; + case _SPECIALIZE_BINARY_SUBSCR: + return 2; + case _SPECIALIZE_CALL: + return oparg + 2; + case _SPECIALIZE_COMPARE_OP: + return 2; + case _SPECIALIZE_FOR_ITER: + return 1; + case _SPECIALIZE_LOAD_ATTR: + return 1; + case _SPECIALIZE_LOAD_GLOBAL: return 0; - case _SET_IP: + case _SPECIALIZE_LOAD_SUPER_ATTR: + return 3; + case _SPECIALIZE_SEND: + return 2; + case _SPECIALIZE_STORE_ATTR: + return 1; + case _SPECIALIZE_STORE_SUBSCR: + return 2; + case _SPECIALIZE_TO_BOOL: + return 1; + case _SPECIALIZE_UNPACK_SEQUENCE: + return 1; + case _STORE_ATTR: return 0; - case _SAVE_RETURN_OFFSET: + case _STORE_ATTR_INSTANCE_VALUE: return 0; - case _EXIT_TRACE: + case _STORE_ATTR_SLOT: return 0; - case _INSERT: - return oparg + 1; - case _CHECK_VALIDITY: + case _STORE_SUBSCR: return 0; + case _TO_BOOL: + return 1; + case _UNPACK_SEQUENCE: + return oparg; default: return -1; } @@ -1377,484 +1377,484 @@ struct opcode_macro_expansion { extern const struct opcode_metadata _PyOpcode_opcode_metadata[OPCODE_METADATA_SIZE]; #ifdef NEED_OPCODE_METADATA const struct opcode_metadata _PyOpcode_opcode_metadata[OPCODE_METADATA_SIZE] = { - [NOP] = { true, INSTR_FMT_IX, 0 }, - [RESUME] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [RESUME_CHECK] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [INSTRUMENTED_RESUME] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [LOAD_CLOSURE] = { true, 0, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, - [LOAD_FAST_CHECK] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_ERROR_FLAG }, - [LOAD_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, - [LOAD_FAST_AND_CLEAR] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, - [LOAD_FAST_LOAD_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, - [LOAD_CONST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_CONST_FLAG }, - [STORE_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, - [STORE_FAST_MAYBE_NULL] = { true, 0, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, - [STORE_FAST_LOAD_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, - [STORE_FAST_STORE_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, - [POP_TOP] = { true, INSTR_FMT_IX, 0 }, - [PUSH_NULL] = { true, INSTR_FMT_IX, 0 }, - [END_FOR] = { true, INSTR_FMT_IX, 0 }, - [INSTRUMENTED_END_FOR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [END_SEND] = { true, INSTR_FMT_IX, 0 }, - [INSTRUMENTED_END_SEND] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [UNARY_NEGATIVE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [UNARY_NOT] = { true, INSTR_FMT_IX, 0 }, - [_SPECIALIZE_TO_BOOL] = { true, INSTR_FMT_IXC, HAS_ESCAPES_FLAG }, - [_TO_BOOL] = { true, INSTR_FMT_IXC0, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [TO_BOOL] = { true, INSTR_FMT_IXC00, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [TO_BOOL_BOOL] = { true, INSTR_FMT_IXC00, HAS_DEOPT_FLAG }, - [TO_BOOL_INT] = { true, INSTR_FMT_IXC00, HAS_DEOPT_FLAG }, - [TO_BOOL_LIST] = { true, INSTR_FMT_IXC00, HAS_DEOPT_FLAG }, - [TO_BOOL_NONE] = { true, 
INSTR_FMT_IXC00, HAS_DEOPT_FLAG }, - [TO_BOOL_STR] = { true, INSTR_FMT_IXC00, HAS_DEOPT_FLAG }, - [TO_BOOL_ALWAYS_TRUE] = { true, INSTR_FMT_IXC00, HAS_DEOPT_FLAG }, - [UNARY_INVERT] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_GUARD_BOTH_INT] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_BINARY_OP_MULTIPLY_INT] = { true, INSTR_FMT_IXC, HAS_ERROR_FLAG }, - [_BINARY_OP_ADD_INT] = { true, INSTR_FMT_IXC, HAS_ERROR_FLAG }, - [_BINARY_OP_SUBTRACT_INT] = { true, INSTR_FMT_IXC, HAS_ERROR_FLAG }, - [BINARY_OP_MULTIPLY_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ERROR_FLAG }, - [BINARY_OP_ADD_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ERROR_FLAG }, - [BINARY_OP_SUBTRACT_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ERROR_FLAG }, - [_GUARD_BOTH_FLOAT] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_BINARY_OP_MULTIPLY_FLOAT] = { true, INSTR_FMT_IXC, 0 }, - [_BINARY_OP_ADD_FLOAT] = { true, INSTR_FMT_IXC, 0 }, - [_BINARY_OP_SUBTRACT_FLOAT] = { true, INSTR_FMT_IXC, 0 }, - [BINARY_OP_MULTIPLY_FLOAT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG }, + [BEFORE_ASYNC_WITH] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [BEFORE_WITH] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [BINARY_OP] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [BINARY_OP_ADD_FLOAT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG }, - [BINARY_OP_SUBTRACT_FLOAT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG }, - [_GUARD_BOTH_UNICODE] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_BINARY_OP_ADD_UNICODE] = { true, INSTR_FMT_IXC, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [BINARY_OP_ADD_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ERROR_FLAG }, [BINARY_OP_ADD_UNICODE] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_BINARY_OP_INPLACE_ADD_UNICODE] = { true, INSTR_FMT_IXC, HAS_LOCAL_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [BINARY_OP_INPLACE_ADD_UNICODE] = { true, INSTR_FMT_IXC, HAS_LOCAL_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_SPECIALIZE_BINARY_SUBSCR] = { true, INSTR_FMT_IXC, HAS_ESCAPES_FLAG }, - [_BINARY_SUBSCR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [BINARY_SUBSCR] = { true, INSTR_FMT_IXC, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [BINARY_OP_MULTIPLY_FLOAT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG }, + [BINARY_OP_MULTIPLY_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ERROR_FLAG }, + [BINARY_OP_SUBTRACT_FLOAT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG }, + [BINARY_OP_SUBTRACT_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ERROR_FLAG }, [BINARY_SLICE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [STORE_SLICE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [BINARY_SUBSCR] = { true, INSTR_FMT_IXC, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [BINARY_SUBSCR_DICT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [BINARY_SUBSCR_GETITEM] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, [BINARY_SUBSCR_LIST_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG }, [BINARY_SUBSCR_STR_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG }, [BINARY_SUBSCR_TUPLE_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG }, - [BINARY_SUBSCR_DICT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [BINARY_SUBSCR_GETITEM] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, - [LIST_APPEND] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG }, - [SET_ADD] = { true, INSTR_FMT_IB, 
HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_SPECIALIZE_STORE_SUBSCR] = { true, INSTR_FMT_IXC, HAS_ESCAPES_FLAG }, - [_STORE_SUBSCR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [STORE_SUBSCR] = { true, INSTR_FMT_IXC, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [STORE_SUBSCR_LIST_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, - [STORE_SUBSCR_DICT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [DELETE_SUBSCR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [BUILD_CONST_KEY_MAP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [BUILD_LIST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [BUILD_MAP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [BUILD_SET] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [BUILD_SLICE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [BUILD_STRING] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [BUILD_TUPLE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [CACHE] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG }, + [CALL] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [CALL_ALLOC_AND_ENTER_INIT] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [CALL_BOUND_METHOD_EXACT_ARGS] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, + [CALL_BUILTIN_CLASS] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG }, + [CALL_BUILTIN_FAST] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [CALL_BUILTIN_FAST_WITH_KEYWORDS] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [CALL_BUILTIN_O] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [CALL_FUNCTION_EX] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [CALL_INTRINSIC_1] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [CALL_INTRINSIC_2] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [RAISE_VARARGS] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [INTERPRETER_EXIT] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG }, - [_POP_FRAME] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG }, - [RETURN_VALUE] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG }, - [INSTRUMENTED_RETURN_VALUE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [RETURN_CONST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_CONST_FLAG | HAS_ESCAPES_FLAG }, - [INSTRUMENTED_RETURN_CONST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_CONST_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [CALL_ISINSTANCE] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [CALL_KW] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [CALL_LEN] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [CALL_LIST_APPEND] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG }, + [CALL_METHOD_DESCRIPTOR_FAST] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG 
| HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [CALL_METHOD_DESCRIPTOR_NOARGS] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [CALL_METHOD_DESCRIPTOR_O] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [CALL_PY_EXACT_ARGS] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, + [CALL_PY_WITH_DEFAULTS] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, + [CALL_STR_1] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [CALL_TUPLE_1] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [CALL_TYPE_1] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, + [CHECK_EG_MATCH] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [CHECK_EXC_MATCH] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [CLEANUP_THROW] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [COMPARE_OP] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [COMPARE_OP_FLOAT] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, + [COMPARE_OP_INT] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, + [COMPARE_OP_STR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, + [CONTAINS_OP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [CONVERT_VALUE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG }, + [COPY] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, + [COPY_FREE_VARS] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, + [DELETE_ATTR] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [DELETE_DEREF] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [DELETE_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_ERROR_FLAG }, + [DELETE_GLOBAL] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [DELETE_NAME] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [DELETE_SUBSCR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [DICT_MERGE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [DICT_UPDATE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [END_ASYNC_FOR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [END_FOR] = { true, INSTR_FMT_IX, 0 }, + [END_SEND] = { true, INSTR_FMT_IX, 0 }, + [ENTER_EXECUTOR] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [EXIT_INIT_CHECK] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [EXTENDED_ARG] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, + [FORMAT_SIMPLE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [FORMAT_WITH_SPEC] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [FOR_ITER] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [FOR_ITER_GEN] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, 
+ [FOR_ITER_LIST] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_DEOPT_FLAG }, + [FOR_ITER_RANGE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [FOR_ITER_TUPLE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_DEOPT_FLAG }, [GET_AITER] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [GET_ANEXT] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [GET_AWAITABLE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_SPECIALIZE_SEND] = { true, INSTR_FMT_IXC, HAS_ESCAPES_FLAG }, - [_SEND] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [SEND] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [SEND_GEN] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, + [GET_ITER] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [GET_LEN] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [GET_YIELD_FROM_ITER] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [IMPORT_FROM] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [IMPORT_NAME] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [INSTRUMENTED_CALL] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [INSTRUMENTED_CALL_FUNCTION_EX] = { true, INSTR_FMT_IX, 0 }, + [INSTRUMENTED_CALL_KW] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [INSTRUMENTED_END_FOR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [INSTRUMENTED_END_SEND] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [INSTRUMENTED_FOR_ITER] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [INSTRUMENTED_INSTRUCTION] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [INSTRUMENTED_JUMP_BACKWARD] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG }, + [INSTRUMENTED_JUMP_FORWARD] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, + [INSTRUMENTED_LOAD_SUPER_ATTR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG }, + [INSTRUMENTED_POP_JUMP_IF_FALSE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG }, + [INSTRUMENTED_POP_JUMP_IF_NONE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG }, + [INSTRUMENTED_POP_JUMP_IF_NOT_NONE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG }, + [INSTRUMENTED_POP_JUMP_IF_TRUE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG }, + [INSTRUMENTED_RESUME] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [INSTRUMENTED_RETURN_CONST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_CONST_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [INSTRUMENTED_RETURN_VALUE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [INSTRUMENTED_YIELD_VALUE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [YIELD_VALUE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, - [POP_EXCEPT] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG }, - [RERAISE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [END_ASYNC_FOR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [CLEANUP_THROW] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [INTERPRETER_EXIT] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG }, + [IS_OP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, + [JUMP] = { true, 0, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_EVAL_BREAK_FLAG | 
HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [JUMP_BACKWARD] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [JUMP_BACKWARD_NO_INTERRUPT] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_JUMP_FLAG }, + [JUMP_FORWARD] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_JUMP_FLAG }, + [JUMP_NO_INTERRUPT] = { true, 0, HAS_ARG_FLAG | HAS_JUMP_FLAG }, + [LIST_APPEND] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG }, + [LIST_EXTEND] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [LOAD_ASSERTION_ERROR] = { true, INSTR_FMT_IX, 0 }, + [LOAD_ATTR] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [LOAD_ATTR_CLASS] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, + [LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, + [LOAD_ATTR_INSTANCE_VALUE] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, + [LOAD_ATTR_METHOD_LAZY_DICT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, + [LOAD_ATTR_METHOD_NO_DICT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, + [LOAD_ATTR_METHOD_WITH_VALUES] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, + [LOAD_ATTR_MODULE] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, + [LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, + [LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, + [LOAD_ATTR_PROPERTY] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, + [LOAD_ATTR_SLOT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, + [LOAD_ATTR_WITH_HINT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, [LOAD_BUILD_CLASS] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [STORE_NAME] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [DELETE_NAME] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_SPECIALIZE_UNPACK_SEQUENCE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, - [_UNPACK_SEQUENCE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [UNPACK_SEQUENCE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [UNPACK_SEQUENCE_TWO_TUPLE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, - [UNPACK_SEQUENCE_TUPLE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, - [UNPACK_SEQUENCE_LIST] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, - [UNPACK_EX] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_SPECIALIZE_STORE_ATTR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ESCAPES_FLAG }, - [_STORE_ATTR] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [STORE_ATTR] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [DELETE_ATTR] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [STORE_GLOBAL] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [DELETE_GLOBAL] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | 
HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [LOAD_LOCALS] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [LOAD_CLOSURE] = { true, 0, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, + [LOAD_CONST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_CONST_FLAG }, + [LOAD_DEREF] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [LOAD_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, + [LOAD_FAST_AND_CLEAR] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, + [LOAD_FAST_CHECK] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_ERROR_FLAG }, + [LOAD_FAST_LOAD_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, + [LOAD_FROM_DICT_OR_DEREF] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [LOAD_FROM_DICT_OR_GLOBALS] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [LOAD_NAME] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_SPECIALIZE_LOAD_GLOBAL] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ESCAPES_FLAG }, - [_LOAD_GLOBAL] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [LOAD_GLOBAL] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_GUARD_GLOBALS_VERSION] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG }, - [_GUARD_BUILTINS_VERSION] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG }, - [_LOAD_GLOBAL_MODULE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, - [_LOAD_GLOBAL_BUILTINS] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, - [LOAD_GLOBAL_MODULE] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, [LOAD_GLOBAL_BUILTIN] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, - [DELETE_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_ERROR_FLAG }, - [MAKE_CELL] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [DELETE_DEREF] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [LOAD_FROM_DICT_OR_DEREF] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [LOAD_DEREF] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [STORE_DEREF] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ESCAPES_FLAG }, - [COPY_FREE_VARS] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, - [BUILD_STRING] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [BUILD_TUPLE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [BUILD_LIST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [LIST_EXTEND] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [SET_UPDATE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [BUILD_SET] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [BUILD_MAP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [SETUP_ANNOTATIONS] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [BUILD_CONST_KEY_MAP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [DICT_UPDATE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [DICT_MERGE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [MAP_ADD] = { true, INSTR_FMT_IB, 
HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [INSTRUMENTED_LOAD_SUPER_ATTR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG }, - [_SPECIALIZE_LOAD_SUPER_ATTR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, - [_LOAD_SUPER_ATTR] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [LOAD_GLOBAL_MODULE] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, + [LOAD_LOCALS] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [LOAD_METHOD] = { true, 0, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [LOAD_NAME] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [LOAD_SUPER_ATTR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [LOAD_SUPER_METHOD] = { true, 0, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [LOAD_ZERO_SUPER_METHOD] = { true, 0, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [LOAD_ZERO_SUPER_ATTR] = { true, 0, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [LOAD_SUPER_ATTR_ATTR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [LOAD_SUPER_ATTR_METHOD] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_SPECIALIZE_LOAD_ATTR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ESCAPES_FLAG }, - [_LOAD_ATTR] = { true, INSTR_FMT_IBC0000000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [LOAD_ATTR] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [LOAD_METHOD] = { true, 0, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_GUARD_TYPE_VERSION] = { true, INSTR_FMT_IXC0, HAS_DEOPT_FLAG }, - [_CHECK_MANAGED_OBJECT_HAS_VALUES] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_LOAD_ATTR_INSTANCE_VALUE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, - [LOAD_ATTR_INSTANCE_VALUE] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, - [_CHECK_ATTR_MODULE] = { true, INSTR_FMT_IXC0, HAS_DEOPT_FLAG }, - [_LOAD_ATTR_MODULE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, - [LOAD_ATTR_MODULE] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, - [_CHECK_ATTR_WITH_HINT] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, - [_LOAD_ATTR_WITH_HINT] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, - [LOAD_ATTR_WITH_HINT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, - [_LOAD_ATTR_SLOT] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, - [LOAD_ATTR_SLOT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, - [_CHECK_ATTR_CLASS] = { true, INSTR_FMT_IXC0, HAS_DEOPT_FLAG }, - [_LOAD_ATTR_CLASS] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG }, - [LOAD_ATTR_CLASS] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, - [LOAD_ATTR_PROPERTY] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, - [LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, - [_GUARD_DORV_VALUES] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_STORE_ATTR_INSTANCE_VALUE] = { true, INSTR_FMT_IXC, HAS_ESCAPES_FLAG }, - [STORE_ATTR_INSTANCE_VALUE] = { true, INSTR_FMT_IXC000, HAS_DEOPT_FLAG 
| HAS_ESCAPES_FLAG }, - [STORE_ATTR_WITH_HINT] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, - [_STORE_ATTR_SLOT] = { true, INSTR_FMT_IXC, HAS_ESCAPES_FLAG }, - [STORE_ATTR_SLOT] = { true, INSTR_FMT_IXC000, HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, - [_SPECIALIZE_COMPARE_OP] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, - [_COMPARE_OP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [COMPARE_OP] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [COMPARE_OP_FLOAT] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, - [COMPARE_OP_INT] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, - [COMPARE_OP_STR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, - [IS_OP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, - [CONTAINS_OP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [CHECK_EG_MATCH] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [CHECK_EXC_MATCH] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [IMPORT_NAME] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [IMPORT_FROM] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [JUMP_FORWARD] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_JUMP_FLAG }, - [JUMP_BACKWARD] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [JUMP] = { true, 0, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [JUMP_NO_INTERRUPT] = { true, 0, HAS_ARG_FLAG | HAS_JUMP_FLAG }, - [ENTER_EXECUTOR] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_POP_JUMP_IF_FALSE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG }, - [_POP_JUMP_IF_TRUE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG }, - [_IS_NONE] = { true, INSTR_FMT_IX, 0 }, - [POP_JUMP_IF_TRUE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG }, - [POP_JUMP_IF_FALSE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG }, - [POP_JUMP_IF_NONE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG }, - [POP_JUMP_IF_NOT_NONE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG }, - [JUMP_BACKWARD_NO_INTERRUPT] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_JUMP_FLAG }, - [GET_LEN] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [LOAD_SUPER_METHOD] = { true, 0, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [LOAD_ZERO_SUPER_ATTR] = { true, 0, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [LOAD_ZERO_SUPER_METHOD] = { true, 0, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [MAKE_CELL] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [MAKE_FUNCTION] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [MAP_ADD] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [MATCH_CLASS] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [MATCH_KEYS] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [MATCH_MAPPING] = { true, INSTR_FMT_IX, 0 }, [MATCH_SEQUENCE] = { true, INSTR_FMT_IX, 0 }, - [MATCH_KEYS] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [GET_ITER] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - 
[GET_YIELD_FROM_ITER] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_SPECIALIZE_FOR_ITER] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, - [_FOR_ITER] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_FOR_ITER_TIER_TWO] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [FOR_ITER] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [INSTRUMENTED_FOR_ITER] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_ITER_CHECK_LIST] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_ITER_JUMP_LIST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_JUMP_FLAG }, - [_GUARD_NOT_EXHAUSTED_LIST] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_ITER_NEXT_LIST] = { true, INSTR_FMT_IX, 0 }, - [FOR_ITER_LIST] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_DEOPT_FLAG }, - [_ITER_CHECK_TUPLE] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_ITER_JUMP_TUPLE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_JUMP_FLAG }, - [_GUARD_NOT_EXHAUSTED_TUPLE] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_ITER_NEXT_TUPLE] = { true, INSTR_FMT_IX, 0 }, - [FOR_ITER_TUPLE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_DEOPT_FLAG }, - [_ITER_CHECK_RANGE] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_ITER_JUMP_RANGE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_JUMP_FLAG }, - [_GUARD_NOT_EXHAUSTED_RANGE] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_ITER_NEXT_RANGE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [FOR_ITER_RANGE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [FOR_ITER_GEN] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, - [BEFORE_ASYNC_WITH] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [BEFORE_WITH] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [WITH_EXCEPT_START] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [SETUP_FINALLY] = { true, 0, HAS_ARG_FLAG }, - [SETUP_CLEANUP] = { true, 0, HAS_ARG_FLAG }, - [SETUP_WITH] = { true, 0, HAS_ARG_FLAG }, + [NOP] = { true, INSTR_FMT_IX, 0 }, [POP_BLOCK] = { true, 0, 0 }, + [POP_EXCEPT] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG }, + [POP_JUMP_IF_FALSE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG }, + [POP_JUMP_IF_NONE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG }, + [POP_JUMP_IF_NOT_NONE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG }, + [POP_JUMP_IF_TRUE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG }, + [POP_TOP] = { true, INSTR_FMT_IX, 0 }, [PUSH_EXC_INFO] = { true, INSTR_FMT_IX, 0 }, - [_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_GUARD_KEYS_VERSION] = { true, INSTR_FMT_IXC0, HAS_DEOPT_FLAG }, - [_LOAD_ATTR_METHOD_WITH_VALUES] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, - [LOAD_ATTR_METHOD_WITH_VALUES] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, - [_LOAD_ATTR_METHOD_NO_DICT] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, - [LOAD_ATTR_METHOD_NO_DICT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, - [_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG }, - [LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, - [_LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = { true, INSTR_FMT_IBC000, 
HAS_ARG_FLAG }, - [LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, - [_CHECK_ATTR_METHOD_LAZY_DICT] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_LOAD_ATTR_METHOD_LAZY_DICT] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, - [LOAD_ATTR_METHOD_LAZY_DICT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, - [INSTRUMENTED_CALL] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_SPECIALIZE_CALL] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, + [PUSH_NULL] = { true, INSTR_FMT_IX, 0 }, + [RAISE_VARARGS] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [RERAISE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [RESERVED] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG }, + [RESUME] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [RESUME_CHECK] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, + [RETURN_CONST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_CONST_FLAG | HAS_ESCAPES_FLAG }, + [RETURN_GENERATOR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [RETURN_VALUE] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG }, + [SEND] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [SEND_GEN] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, + [SETUP_ANNOTATIONS] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [SETUP_CLEANUP] = { true, 0, HAS_ARG_FLAG }, + [SETUP_FINALLY] = { true, 0, HAS_ARG_FLAG }, + [SETUP_WITH] = { true, 0, HAS_ARG_FLAG }, + [SET_ADD] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [SET_FUNCTION_ATTRIBUTE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, + [SET_UPDATE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [STORE_ATTR] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [STORE_ATTR_INSTANCE_VALUE] = { true, INSTR_FMT_IXC000, HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, + [STORE_ATTR_SLOT] = { true, INSTR_FMT_IXC000, HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, + [STORE_ATTR_WITH_HINT] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, + [STORE_DEREF] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ESCAPES_FLAG }, + [STORE_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, + [STORE_FAST_LOAD_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, + [STORE_FAST_MAYBE_NULL] = { true, 0, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, + [STORE_FAST_STORE_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, + [STORE_GLOBAL] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [STORE_NAME] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [STORE_SLICE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [STORE_SUBSCR] = { true, INSTR_FMT_IXC, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [STORE_SUBSCR_DICT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [STORE_SUBSCR_LIST_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, + [SWAP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, + [TO_BOOL] = { true, INSTR_FMT_IXC00, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [TO_BOOL_ALWAYS_TRUE] = { true, INSTR_FMT_IXC00, HAS_DEOPT_FLAG }, + [TO_BOOL_BOOL] = { true, 
INSTR_FMT_IXC00, HAS_DEOPT_FLAG }, + [TO_BOOL_INT] = { true, INSTR_FMT_IXC00, HAS_DEOPT_FLAG }, + [TO_BOOL_LIST] = { true, INSTR_FMT_IXC00, HAS_DEOPT_FLAG }, + [TO_BOOL_NONE] = { true, INSTR_FMT_IXC00, HAS_DEOPT_FLAG }, + [TO_BOOL_STR] = { true, INSTR_FMT_IXC00, HAS_DEOPT_FLAG }, + [UNARY_INVERT] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [UNARY_NEGATIVE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [UNARY_NOT] = { true, INSTR_FMT_IX, 0 }, + [UNPACK_EX] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [UNPACK_SEQUENCE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [UNPACK_SEQUENCE_LIST] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, + [UNPACK_SEQUENCE_TUPLE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, + [UNPACK_SEQUENCE_TWO_TUPLE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, + [WITH_EXCEPT_START] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [YIELD_VALUE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, + [_BINARY_OP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG }, + [_BINARY_OP_ADD_FLOAT] = { true, INSTR_FMT_IXC, 0 }, + [_BINARY_OP_ADD_INT] = { true, INSTR_FMT_IXC, HAS_ERROR_FLAG }, + [_BINARY_OP_ADD_UNICODE] = { true, INSTR_FMT_IXC, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [_BINARY_OP_INPLACE_ADD_UNICODE] = { true, INSTR_FMT_IXC, HAS_LOCAL_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [_BINARY_OP_MULTIPLY_FLOAT] = { true, INSTR_FMT_IXC, 0 }, + [_BINARY_OP_MULTIPLY_INT] = { true, INSTR_FMT_IXC, HAS_ERROR_FLAG }, + [_BINARY_OP_SUBTRACT_FLOAT] = { true, INSTR_FMT_IXC, 0 }, + [_BINARY_OP_SUBTRACT_INT] = { true, INSTR_FMT_IXC, HAS_ERROR_FLAG }, + [_BINARY_SUBSCR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [_CALL] = { true, INSTR_FMT_IBC0, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [CALL] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [_CHECK_ATTR_CLASS] = { true, INSTR_FMT_IXC0, HAS_DEOPT_FLAG }, + [_CHECK_ATTR_METHOD_LAZY_DICT] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, + [_CHECK_ATTR_MODULE] = { true, INSTR_FMT_IXC0, HAS_DEOPT_FLAG }, + [_CHECK_ATTR_WITH_HINT] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, [_CHECK_CALL_BOUND_METHOD_EXACT_ARGS] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, - [_INIT_CALL_BOUND_METHOD_EXACT_ARGS] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, - [_CHECK_PEP_523] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, [_CHECK_FUNCTION_EXACT_ARGS] = { true, INSTR_FMT_IBC0, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, + [_CHECK_MANAGED_OBJECT_HAS_VALUES] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, + [_CHECK_PEP_523] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, [_CHECK_STACK_SPACE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, - [_INIT_CALL_PY_EXACT_ARGS] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, - [_PUSH_FRAME] = { true, INSTR_FMT_IX, 0 }, - [CALL_BOUND_METHOD_EXACT_ARGS] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, - [CALL_PY_EXACT_ARGS] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, - [CALL_PY_WITH_DEFAULTS] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, - [CALL_TYPE_1] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, - [CALL_STR_1] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - 
[CALL_TUPLE_1] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [CALL_ALLOC_AND_ENTER_INIT] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [EXIT_INIT_CHECK] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [CALL_BUILTIN_CLASS] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG }, - [CALL_BUILTIN_O] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [CALL_BUILTIN_FAST] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [CALL_BUILTIN_FAST_WITH_KEYWORDS] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [CALL_LEN] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [CALL_ISINSTANCE] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [CALL_LIST_APPEND] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG }, - [CALL_METHOD_DESCRIPTOR_O] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [CALL_METHOD_DESCRIPTOR_NOARGS] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [CALL_METHOD_DESCRIPTOR_FAST] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [INSTRUMENTED_CALL_KW] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [CALL_KW] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [INSTRUMENTED_CALL_FUNCTION_EX] = { true, INSTR_FMT_IX, 0 }, - [CALL_FUNCTION_EX] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [MAKE_FUNCTION] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [SET_FUNCTION_ATTRIBUTE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, - [RETURN_GENERATOR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [BUILD_SLICE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [CONVERT_VALUE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG }, - [FORMAT_SIMPLE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [FORMAT_WITH_SPEC] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [COPY] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, - [_SPECIALIZE_BINARY_OP] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, - [_BINARY_OP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG }, - [BINARY_OP] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [SWAP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, - [INSTRUMENTED_INSTRUCTION] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [INSTRUMENTED_JUMP_FORWARD] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, - [INSTRUMENTED_JUMP_BACKWARD] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG }, - [INSTRUMENTED_POP_JUMP_IF_TRUE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG }, - [INSTRUMENTED_POP_JUMP_IF_FALSE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG }, - 
[INSTRUMENTED_POP_JUMP_IF_NONE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG }, - [INSTRUMENTED_POP_JUMP_IF_NOT_NONE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG }, - [EXTENDED_ARG] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, - [CACHE] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG }, - [RESERVED] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG }, - [_GUARD_IS_TRUE_POP] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, + [_CHECK_VALIDITY] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, + [_COMPARE_OP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [_EXIT_TRACE] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, + [_FOR_ITER] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [_FOR_ITER_TIER_TWO] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [_GUARD_BOTH_FLOAT] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, + [_GUARD_BOTH_INT] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, + [_GUARD_BOTH_UNICODE] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, + [_GUARD_BUILTINS_VERSION] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG }, + [_GUARD_DORV_VALUES] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, + [_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, + [_GUARD_GLOBALS_VERSION] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG }, [_GUARD_IS_FALSE_POP] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, [_GUARD_IS_NONE_POP] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, [_GUARD_IS_NOT_NONE_POP] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, + [_GUARD_IS_TRUE_POP] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, + [_GUARD_KEYS_VERSION] = { true, INSTR_FMT_IXC0, HAS_DEOPT_FLAG }, + [_GUARD_NOT_EXHAUSTED_LIST] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, + [_GUARD_NOT_EXHAUSTED_RANGE] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, + [_GUARD_NOT_EXHAUSTED_TUPLE] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, + [_GUARD_TYPE_VERSION] = { true, INSTR_FMT_IXC0, HAS_DEOPT_FLAG }, + [_INIT_CALL_BOUND_METHOD_EXACT_ARGS] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, + [_INIT_CALL_PY_EXACT_ARGS] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, + [_INSERT] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, + [_IS_NONE] = { true, INSTR_FMT_IX, 0 }, + [_ITER_CHECK_LIST] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, + [_ITER_CHECK_RANGE] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, + [_ITER_CHECK_TUPLE] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, + [_ITER_JUMP_LIST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_JUMP_FLAG }, + [_ITER_JUMP_RANGE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_JUMP_FLAG }, + [_ITER_JUMP_TUPLE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_JUMP_FLAG }, + [_ITER_NEXT_LIST] = { true, INSTR_FMT_IX, 0 }, + [_ITER_NEXT_RANGE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [_ITER_NEXT_TUPLE] = { true, INSTR_FMT_IX, 0 }, [_JUMP_TO_TOP] = { true, INSTR_FMT_IX, HAS_EVAL_BREAK_FLAG }, - [_SET_IP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, + [_LOAD_ATTR] = { true, INSTR_FMT_IBC0000000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [_LOAD_ATTR_CLASS] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG }, + [_LOAD_ATTR_INSTANCE_VALUE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, + [_LOAD_ATTR_METHOD_LAZY_DICT] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, + [_LOAD_ATTR_METHOD_NO_DICT] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, + [_LOAD_ATTR_METHOD_WITH_VALUES] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, + [_LOAD_ATTR_MODULE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, + 
[_LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG }, + [_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG }, + [_LOAD_ATTR_SLOT] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, + [_LOAD_ATTR_WITH_HINT] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, + [_LOAD_GLOBAL] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [_LOAD_GLOBAL_BUILTINS] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, + [_LOAD_GLOBAL_MODULE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, + [_LOAD_SUPER_ATTR] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [_POP_FRAME] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG }, + [_POP_JUMP_IF_FALSE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG }, + [_POP_JUMP_IF_TRUE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG }, + [_PUSH_FRAME] = { true, INSTR_FMT_IX, 0 }, [_SAVE_RETURN_OFFSET] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, - [_EXIT_TRACE] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_INSERT] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, - [_CHECK_VALIDITY] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, + [_SEND] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [_SET_IP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, + [_SPECIALIZE_BINARY_OP] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, + [_SPECIALIZE_BINARY_SUBSCR] = { true, INSTR_FMT_IXC, HAS_ESCAPES_FLAG }, + [_SPECIALIZE_CALL] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, + [_SPECIALIZE_COMPARE_OP] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, + [_SPECIALIZE_FOR_ITER] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, + [_SPECIALIZE_LOAD_ATTR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ESCAPES_FLAG }, + [_SPECIALIZE_LOAD_GLOBAL] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ESCAPES_FLAG }, + [_SPECIALIZE_LOAD_SUPER_ATTR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, + [_SPECIALIZE_SEND] = { true, INSTR_FMT_IXC, HAS_ESCAPES_FLAG }, + [_SPECIALIZE_STORE_ATTR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ESCAPES_FLAG }, + [_SPECIALIZE_STORE_SUBSCR] = { true, INSTR_FMT_IXC, HAS_ESCAPES_FLAG }, + [_SPECIALIZE_TO_BOOL] = { true, INSTR_FMT_IXC, HAS_ESCAPES_FLAG }, + [_SPECIALIZE_UNPACK_SEQUENCE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, + [_STORE_ATTR] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [_STORE_ATTR_INSTANCE_VALUE] = { true, INSTR_FMT_IXC, HAS_ESCAPES_FLAG }, + [_STORE_ATTR_SLOT] = { true, INSTR_FMT_IXC, HAS_ESCAPES_FLAG }, + [_STORE_SUBSCR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [_TO_BOOL] = { true, INSTR_FMT_IXC0, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [_UNPACK_SEQUENCE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, }; #endif // NEED_OPCODE_METADATA extern const struct opcode_macro_expansion _PyOpcode_macro_expansion[OPCODE_MACRO_EXPANSION_SIZE]; #ifdef NEED_OPCODE_METADATA const struct opcode_macro_expansion _PyOpcode_macro_expansion[OPCODE_MACRO_EXPANSION_SIZE] = { - [NOP] = { .nuops = 1, .uops = { { NOP, 0, 0 } } }, - [RESUME_CHECK] = { .nuops = 1, .uops = { { RESUME_CHECK, 0, 0 } } }, - [LOAD_FAST_CHECK] = { .nuops = 1, .uops = { { LOAD_FAST_CHECK, 0, 0 } } }, - [LOAD_FAST] = { .nuops = 1, .uops = { { LOAD_FAST, 0, 
0 } } }, - [LOAD_FAST_AND_CLEAR] = { .nuops = 1, .uops = { { LOAD_FAST_AND_CLEAR, 0, 0 } } }, - [LOAD_FAST_LOAD_FAST] = { .nuops = 2, .uops = { { LOAD_FAST, 5, 0 }, { LOAD_FAST, 6, 0 } } }, - [LOAD_CONST] = { .nuops = 1, .uops = { { LOAD_CONST, 0, 0 } } }, - [STORE_FAST] = { .nuops = 1, .uops = { { STORE_FAST, 0, 0 } } }, - [STORE_FAST_LOAD_FAST] = { .nuops = 2, .uops = { { STORE_FAST, 5, 0 }, { LOAD_FAST, 6, 0 } } }, - [STORE_FAST_STORE_FAST] = { .nuops = 2, .uops = { { STORE_FAST, 5, 0 }, { STORE_FAST, 6, 0 } } }, - [POP_TOP] = { .nuops = 1, .uops = { { POP_TOP, 0, 0 } } }, - [PUSH_NULL] = { .nuops = 1, .uops = { { PUSH_NULL, 0, 0 } } }, - [END_FOR] = { .nuops = 2, .uops = { { POP_TOP, 0, 0 }, { POP_TOP, 0, 0 } } }, - [END_SEND] = { .nuops = 1, .uops = { { END_SEND, 0, 0 } } }, - [UNARY_NEGATIVE] = { .nuops = 1, .uops = { { UNARY_NEGATIVE, 0, 0 } } }, - [UNARY_NOT] = { .nuops = 1, .uops = { { UNARY_NOT, 0, 0 } } }, - [TO_BOOL] = { .nuops = 1, .uops = { { _TO_BOOL, 0, 0 } } }, - [TO_BOOL_BOOL] = { .nuops = 1, .uops = { { TO_BOOL_BOOL, 0, 0 } } }, - [TO_BOOL_INT] = { .nuops = 1, .uops = { { TO_BOOL_INT, 0, 0 } } }, - [TO_BOOL_LIST] = { .nuops = 1, .uops = { { TO_BOOL_LIST, 0, 0 } } }, - [TO_BOOL_NONE] = { .nuops = 1, .uops = { { TO_BOOL_NONE, 0, 0 } } }, - [TO_BOOL_STR] = { .nuops = 1, .uops = { { TO_BOOL_STR, 0, 0 } } }, - [TO_BOOL_ALWAYS_TRUE] = { .nuops = 1, .uops = { { TO_BOOL_ALWAYS_TRUE, 2, 1 } } }, - [UNARY_INVERT] = { .nuops = 1, .uops = { { UNARY_INVERT, 0, 0 } } }, - [BINARY_OP_MULTIPLY_INT] = { .nuops = 2, .uops = { { _GUARD_BOTH_INT, 0, 0 }, { _BINARY_OP_MULTIPLY_INT, 0, 0 } } }, + [BEFORE_ASYNC_WITH] = { .nuops = 1, .uops = { { BEFORE_ASYNC_WITH, 0, 0 } } }, + [BEFORE_WITH] = { .nuops = 1, .uops = { { BEFORE_WITH, 0, 0 } } }, + [BINARY_OP] = { .nuops = 1, .uops = { { _BINARY_OP, 0, 0 } } }, + [BINARY_OP_ADD_FLOAT] = { .nuops = 2, .uops = { { _GUARD_BOTH_FLOAT, 0, 0 }, { _BINARY_OP_ADD_FLOAT, 0, 0 } } }, [BINARY_OP_ADD_INT] = { .nuops = 2, .uops = { { _GUARD_BOTH_INT, 0, 0 }, { _BINARY_OP_ADD_INT, 0, 0 } } }, - [BINARY_OP_SUBTRACT_INT] = { .nuops = 2, .uops = { { _GUARD_BOTH_INT, 0, 0 }, { _BINARY_OP_SUBTRACT_INT, 0, 0 } } }, + [BINARY_OP_ADD_UNICODE] = { .nuops = 2, .uops = { { _GUARD_BOTH_UNICODE, 0, 0 }, { _BINARY_OP_ADD_UNICODE, 0, 0 } } }, [BINARY_OP_MULTIPLY_FLOAT] = { .nuops = 2, .uops = { { _GUARD_BOTH_FLOAT, 0, 0 }, { _BINARY_OP_MULTIPLY_FLOAT, 0, 0 } } }, - [BINARY_OP_ADD_FLOAT] = { .nuops = 2, .uops = { { _GUARD_BOTH_FLOAT, 0, 0 }, { _BINARY_OP_ADD_FLOAT, 0, 0 } } }, + [BINARY_OP_MULTIPLY_INT] = { .nuops = 2, .uops = { { _GUARD_BOTH_INT, 0, 0 }, { _BINARY_OP_MULTIPLY_INT, 0, 0 } } }, [BINARY_OP_SUBTRACT_FLOAT] = { .nuops = 2, .uops = { { _GUARD_BOTH_FLOAT, 0, 0 }, { _BINARY_OP_SUBTRACT_FLOAT, 0, 0 } } }, - [BINARY_OP_ADD_UNICODE] = { .nuops = 2, .uops = { { _GUARD_BOTH_UNICODE, 0, 0 }, { _BINARY_OP_ADD_UNICODE, 0, 0 } } }, - [BINARY_SUBSCR] = { .nuops = 1, .uops = { { _BINARY_SUBSCR, 0, 0 } } }, + [BINARY_OP_SUBTRACT_INT] = { .nuops = 2, .uops = { { _GUARD_BOTH_INT, 0, 0 }, { _BINARY_OP_SUBTRACT_INT, 0, 0 } } }, [BINARY_SLICE] = { .nuops = 1, .uops = { { BINARY_SLICE, 0, 0 } } }, - [STORE_SLICE] = { .nuops = 1, .uops = { { STORE_SLICE, 0, 0 } } }, + [BINARY_SUBSCR] = { .nuops = 1, .uops = { { _BINARY_SUBSCR, 0, 0 } } }, + [BINARY_SUBSCR_DICT] = { .nuops = 1, .uops = { { BINARY_SUBSCR_DICT, 0, 0 } } }, [BINARY_SUBSCR_LIST_INT] = { .nuops = 1, .uops = { { BINARY_SUBSCR_LIST_INT, 0, 0 } } }, [BINARY_SUBSCR_STR_INT] = { .nuops = 1, .uops = { { BINARY_SUBSCR_STR_INT, 0, 0 
} } }, [BINARY_SUBSCR_TUPLE_INT] = { .nuops = 1, .uops = { { BINARY_SUBSCR_TUPLE_INT, 0, 0 } } }, - [BINARY_SUBSCR_DICT] = { .nuops = 1, .uops = { { BINARY_SUBSCR_DICT, 0, 0 } } }, - [LIST_APPEND] = { .nuops = 1, .uops = { { LIST_APPEND, 0, 0 } } }, - [SET_ADD] = { .nuops = 1, .uops = { { SET_ADD, 0, 0 } } }, - [STORE_SUBSCR] = { .nuops = 1, .uops = { { _STORE_SUBSCR, 0, 0 } } }, - [STORE_SUBSCR_LIST_INT] = { .nuops = 1, .uops = { { STORE_SUBSCR_LIST_INT, 0, 0 } } }, - [STORE_SUBSCR_DICT] = { .nuops = 1, .uops = { { STORE_SUBSCR_DICT, 0, 0 } } }, - [DELETE_SUBSCR] = { .nuops = 1, .uops = { { DELETE_SUBSCR, 0, 0 } } }, - [CALL_INTRINSIC_1] = { .nuops = 1, .uops = { { CALL_INTRINSIC_1, 0, 0 } } }, - [CALL_INTRINSIC_2] = { .nuops = 1, .uops = { { CALL_INTRINSIC_2, 0, 0 } } }, - [RETURN_VALUE] = { .nuops = 1, .uops = { { _POP_FRAME, 0, 0 } } }, - [RETURN_CONST] = { .nuops = 2, .uops = { { LOAD_CONST, 0, 0 }, { _POP_FRAME, 0, 0 } } }, - [GET_AITER] = { .nuops = 1, .uops = { { GET_AITER, 0, 0 } } }, - [GET_ANEXT] = { .nuops = 1, .uops = { { GET_ANEXT, 0, 0 } } }, - [GET_AWAITABLE] = { .nuops = 1, .uops = { { GET_AWAITABLE, 0, 0 } } }, - [POP_EXCEPT] = { .nuops = 1, .uops = { { POP_EXCEPT, 0, 0 } } }, - [LOAD_ASSERTION_ERROR] = { .nuops = 1, .uops = { { LOAD_ASSERTION_ERROR, 0, 0 } } }, - [LOAD_BUILD_CLASS] = { .nuops = 1, .uops = { { LOAD_BUILD_CLASS, 0, 0 } } }, - [STORE_NAME] = { .nuops = 1, .uops = { { STORE_NAME, 0, 0 } } }, - [DELETE_NAME] = { .nuops = 1, .uops = { { DELETE_NAME, 0, 0 } } }, - [UNPACK_SEQUENCE] = { .nuops = 1, .uops = { { _UNPACK_SEQUENCE, 0, 0 } } }, - [UNPACK_SEQUENCE_TWO_TUPLE] = { .nuops = 1, .uops = { { UNPACK_SEQUENCE_TWO_TUPLE, 0, 0 } } }, - [UNPACK_SEQUENCE_TUPLE] = { .nuops = 1, .uops = { { UNPACK_SEQUENCE_TUPLE, 0, 0 } } }, - [UNPACK_SEQUENCE_LIST] = { .nuops = 1, .uops = { { UNPACK_SEQUENCE_LIST, 0, 0 } } }, - [UNPACK_EX] = { .nuops = 1, .uops = { { UNPACK_EX, 0, 0 } } }, - [STORE_ATTR] = { .nuops = 1, .uops = { { _STORE_ATTR, 0, 0 } } }, - [DELETE_ATTR] = { .nuops = 1, .uops = { { DELETE_ATTR, 0, 0 } } }, - [STORE_GLOBAL] = { .nuops = 1, .uops = { { STORE_GLOBAL, 0, 0 } } }, - [DELETE_GLOBAL] = { .nuops = 1, .uops = { { DELETE_GLOBAL, 0, 0 } } }, - [LOAD_LOCALS] = { .nuops = 1, .uops = { { LOAD_LOCALS, 0, 0 } } }, - [LOAD_FROM_DICT_OR_GLOBALS] = { .nuops = 1, .uops = { { LOAD_FROM_DICT_OR_GLOBALS, 0, 0 } } }, - [LOAD_NAME] = { .nuops = 1, .uops = { { LOAD_NAME, 0, 0 } } }, - [LOAD_GLOBAL] = { .nuops = 1, .uops = { { _LOAD_GLOBAL, 0, 0 } } }, - [LOAD_GLOBAL_MODULE] = { .nuops = 2, .uops = { { _GUARD_GLOBALS_VERSION, 1, 1 }, { _LOAD_GLOBAL_MODULE, 1, 3 } } }, - [LOAD_GLOBAL_BUILTIN] = { .nuops = 3, .uops = { { _GUARD_GLOBALS_VERSION, 1, 1 }, { _GUARD_BUILTINS_VERSION, 1, 2 }, { _LOAD_GLOBAL_BUILTINS, 1, 3 } } }, - [DELETE_FAST] = { .nuops = 1, .uops = { { DELETE_FAST, 0, 0 } } }, - [MAKE_CELL] = { .nuops = 1, .uops = { { MAKE_CELL, 0, 0 } } }, - [DELETE_DEREF] = { .nuops = 1, .uops = { { DELETE_DEREF, 0, 0 } } }, - [LOAD_FROM_DICT_OR_DEREF] = { .nuops = 1, .uops = { { LOAD_FROM_DICT_OR_DEREF, 0, 0 } } }, - [LOAD_DEREF] = { .nuops = 1, .uops = { { LOAD_DEREF, 0, 0 } } }, - [STORE_DEREF] = { .nuops = 1, .uops = { { STORE_DEREF, 0, 0 } } }, - [COPY_FREE_VARS] = { .nuops = 1, .uops = { { COPY_FREE_VARS, 0, 0 } } }, - [BUILD_STRING] = { .nuops = 1, .uops = { { BUILD_STRING, 0, 0 } } }, - [BUILD_TUPLE] = { .nuops = 1, .uops = { { BUILD_TUPLE, 0, 0 } } }, + [BUILD_CONST_KEY_MAP] = { .nuops = 1, .uops = { { BUILD_CONST_KEY_MAP, 0, 0 } } }, [BUILD_LIST] = { .nuops = 1, 
.uops = { { BUILD_LIST, 0, 0 } } }, - [LIST_EXTEND] = { .nuops = 1, .uops = { { LIST_EXTEND, 0, 0 } } }, - [SET_UPDATE] = { .nuops = 1, .uops = { { SET_UPDATE, 0, 0 } } }, - [BUILD_SET] = { .nuops = 1, .uops = { { BUILD_SET, 0, 0 } } }, [BUILD_MAP] = { .nuops = 1, .uops = { { BUILD_MAP, 0, 0 } } }, - [SETUP_ANNOTATIONS] = { .nuops = 1, .uops = { { SETUP_ANNOTATIONS, 0, 0 } } }, - [BUILD_CONST_KEY_MAP] = { .nuops = 1, .uops = { { BUILD_CONST_KEY_MAP, 0, 0 } } }, - [DICT_UPDATE] = { .nuops = 1, .uops = { { DICT_UPDATE, 0, 0 } } }, - [DICT_MERGE] = { .nuops = 1, .uops = { { DICT_MERGE, 0, 0 } } }, - [MAP_ADD] = { .nuops = 1, .uops = { { MAP_ADD, 0, 0 } } }, - [LOAD_SUPER_ATTR_ATTR] = { .nuops = 1, .uops = { { LOAD_SUPER_ATTR_ATTR, 0, 0 } } }, - [LOAD_SUPER_ATTR_METHOD] = { .nuops = 1, .uops = { { LOAD_SUPER_ATTR_METHOD, 0, 0 } } }, - [LOAD_ATTR] = { .nuops = 1, .uops = { { _LOAD_ATTR, 0, 0 } } }, - [LOAD_ATTR_INSTANCE_VALUE] = { .nuops = 3, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _CHECK_MANAGED_OBJECT_HAS_VALUES, 0, 0 }, { _LOAD_ATTR_INSTANCE_VALUE, 1, 3 } } }, - [LOAD_ATTR_MODULE] = { .nuops = 2, .uops = { { _CHECK_ATTR_MODULE, 2, 1 }, { _LOAD_ATTR_MODULE, 1, 3 } } }, - [LOAD_ATTR_WITH_HINT] = { .nuops = 3, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _CHECK_ATTR_WITH_HINT, 0, 0 }, { _LOAD_ATTR_WITH_HINT, 1, 3 } } }, - [LOAD_ATTR_SLOT] = { .nuops = 2, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _LOAD_ATTR_SLOT, 1, 3 } } }, - [LOAD_ATTR_CLASS] = { .nuops = 2, .uops = { { _CHECK_ATTR_CLASS, 2, 1 }, { _LOAD_ATTR_CLASS, 4, 5 } } }, - [STORE_ATTR_INSTANCE_VALUE] = { .nuops = 3, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _GUARD_DORV_VALUES, 0, 0 }, { _STORE_ATTR_INSTANCE_VALUE, 1, 3 } } }, - [STORE_ATTR_SLOT] = { .nuops = 2, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _STORE_ATTR_SLOT, 1, 3 } } }, + [BUILD_SET] = { .nuops = 1, .uops = { { BUILD_SET, 0, 0 } } }, + [BUILD_SLICE] = { .nuops = 1, .uops = { { BUILD_SLICE, 0, 0 } } }, + [BUILD_STRING] = { .nuops = 1, .uops = { { BUILD_STRING, 0, 0 } } }, + [BUILD_TUPLE] = { .nuops = 1, .uops = { { BUILD_TUPLE, 0, 0 } } }, + [CALL_BOUND_METHOD_EXACT_ARGS] = { .nuops = 8, .uops = { { _CHECK_PEP_523, 0, 0 }, { _CHECK_CALL_BOUND_METHOD_EXACT_ARGS, 0, 0 }, { _INIT_CALL_BOUND_METHOD_EXACT_ARGS, 0, 0 }, { _CHECK_FUNCTION_EXACT_ARGS, 2, 1 }, { _CHECK_STACK_SPACE, 0, 0 }, { _INIT_CALL_PY_EXACT_ARGS, 0, 0 }, { _SAVE_RETURN_OFFSET, 7, 3 }, { _PUSH_FRAME, 0, 0 } } }, + [CALL_BUILTIN_CLASS] = { .nuops = 1, .uops = { { CALL_BUILTIN_CLASS, 0, 0 } } }, + [CALL_BUILTIN_FAST] = { .nuops = 1, .uops = { { CALL_BUILTIN_FAST, 0, 0 } } }, + [CALL_BUILTIN_FAST_WITH_KEYWORDS] = { .nuops = 1, .uops = { { CALL_BUILTIN_FAST_WITH_KEYWORDS, 0, 0 } } }, + [CALL_BUILTIN_O] = { .nuops = 1, .uops = { { CALL_BUILTIN_O, 0, 0 } } }, + [CALL_INTRINSIC_1] = { .nuops = 1, .uops = { { CALL_INTRINSIC_1, 0, 0 } } }, + [CALL_INTRINSIC_2] = { .nuops = 1, .uops = { { CALL_INTRINSIC_2, 0, 0 } } }, + [CALL_ISINSTANCE] = { .nuops = 1, .uops = { { CALL_ISINSTANCE, 0, 0 } } }, + [CALL_LEN] = { .nuops = 1, .uops = { { CALL_LEN, 0, 0 } } }, + [CALL_METHOD_DESCRIPTOR_FAST] = { .nuops = 1, .uops = { { CALL_METHOD_DESCRIPTOR_FAST, 0, 0 } } }, + [CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = { .nuops = 1, .uops = { { CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS, 0, 0 } } }, + [CALL_METHOD_DESCRIPTOR_NOARGS] = { .nuops = 1, .uops = { { CALL_METHOD_DESCRIPTOR_NOARGS, 0, 0 } } }, + [CALL_METHOD_DESCRIPTOR_O] = { .nuops = 1, .uops = { { CALL_METHOD_DESCRIPTOR_O, 0, 0 } } }, + [CALL_PY_EXACT_ARGS] = { .nuops = 6, .uops 
= { { _CHECK_PEP_523, 0, 0 }, { _CHECK_FUNCTION_EXACT_ARGS, 2, 1 }, { _CHECK_STACK_SPACE, 0, 0 }, { _INIT_CALL_PY_EXACT_ARGS, 0, 0 }, { _SAVE_RETURN_OFFSET, 7, 3 }, { _PUSH_FRAME, 0, 0 } } }, + [CALL_STR_1] = { .nuops = 1, .uops = { { CALL_STR_1, 0, 0 } } }, + [CALL_TUPLE_1] = { .nuops = 1, .uops = { { CALL_TUPLE_1, 0, 0 } } }, + [CALL_TYPE_1] = { .nuops = 1, .uops = { { CALL_TYPE_1, 0, 0 } } }, + [CHECK_EG_MATCH] = { .nuops = 1, .uops = { { CHECK_EG_MATCH, 0, 0 } } }, + [CHECK_EXC_MATCH] = { .nuops = 1, .uops = { { CHECK_EXC_MATCH, 0, 0 } } }, [COMPARE_OP] = { .nuops = 1, .uops = { { _COMPARE_OP, 0, 0 } } }, [COMPARE_OP_FLOAT] = { .nuops = 1, .uops = { { COMPARE_OP_FLOAT, 0, 0 } } }, [COMPARE_OP_INT] = { .nuops = 1, .uops = { { COMPARE_OP_INT, 0, 0 } } }, [COMPARE_OP_STR] = { .nuops = 1, .uops = { { COMPARE_OP_STR, 0, 0 } } }, - [IS_OP] = { .nuops = 1, .uops = { { IS_OP, 0, 0 } } }, [CONTAINS_OP] = { .nuops = 1, .uops = { { CONTAINS_OP, 0, 0 } } }, - [CHECK_EG_MATCH] = { .nuops = 1, .uops = { { CHECK_EG_MATCH, 0, 0 } } }, - [CHECK_EXC_MATCH] = { .nuops = 1, .uops = { { CHECK_EXC_MATCH, 0, 0 } } }, - [POP_JUMP_IF_TRUE] = { .nuops = 1, .uops = { { _POP_JUMP_IF_TRUE, 0, 0 } } }, - [POP_JUMP_IF_FALSE] = { .nuops = 1, .uops = { { _POP_JUMP_IF_FALSE, 0, 0 } } }, - [POP_JUMP_IF_NONE] = { .nuops = 2, .uops = { { _IS_NONE, 0, 0 }, { _POP_JUMP_IF_TRUE, 0, 0 } } }, - [POP_JUMP_IF_NOT_NONE] = { .nuops = 2, .uops = { { _IS_NONE, 0, 0 }, { _POP_JUMP_IF_FALSE, 0, 0 } } }, - [GET_LEN] = { .nuops = 1, .uops = { { GET_LEN, 0, 0 } } }, - [MATCH_CLASS] = { .nuops = 1, .uops = { { MATCH_CLASS, 0, 0 } } }, - [MATCH_MAPPING] = { .nuops = 1, .uops = { { MATCH_MAPPING, 0, 0 } } }, - [MATCH_SEQUENCE] = { .nuops = 1, .uops = { { MATCH_SEQUENCE, 0, 0 } } }, - [MATCH_KEYS] = { .nuops = 1, .uops = { { MATCH_KEYS, 0, 0 } } }, - [GET_ITER] = { .nuops = 1, .uops = { { GET_ITER, 0, 0 } } }, - [GET_YIELD_FROM_ITER] = { .nuops = 1, .uops = { { GET_YIELD_FROM_ITER, 0, 0 } } }, + [CONVERT_VALUE] = { .nuops = 1, .uops = { { CONVERT_VALUE, 0, 0 } } }, + [COPY] = { .nuops = 1, .uops = { { COPY, 0, 0 } } }, + [COPY_FREE_VARS] = { .nuops = 1, .uops = { { COPY_FREE_VARS, 0, 0 } } }, + [DELETE_ATTR] = { .nuops = 1, .uops = { { DELETE_ATTR, 0, 0 } } }, + [DELETE_DEREF] = { .nuops = 1, .uops = { { DELETE_DEREF, 0, 0 } } }, + [DELETE_FAST] = { .nuops = 1, .uops = { { DELETE_FAST, 0, 0 } } }, + [DELETE_GLOBAL] = { .nuops = 1, .uops = { { DELETE_GLOBAL, 0, 0 } } }, + [DELETE_NAME] = { .nuops = 1, .uops = { { DELETE_NAME, 0, 0 } } }, + [DELETE_SUBSCR] = { .nuops = 1, .uops = { { DELETE_SUBSCR, 0, 0 } } }, + [DICT_MERGE] = { .nuops = 1, .uops = { { DICT_MERGE, 0, 0 } } }, + [DICT_UPDATE] = { .nuops = 1, .uops = { { DICT_UPDATE, 0, 0 } } }, + [END_FOR] = { .nuops = 2, .uops = { { POP_TOP, 0, 0 }, { POP_TOP, 0, 0 } } }, + [END_SEND] = { .nuops = 1, .uops = { { END_SEND, 0, 0 } } }, + [EXIT_INIT_CHECK] = { .nuops = 1, .uops = { { EXIT_INIT_CHECK, 0, 0 } } }, + [FORMAT_SIMPLE] = { .nuops = 1, .uops = { { FORMAT_SIMPLE, 0, 0 } } }, + [FORMAT_WITH_SPEC] = { .nuops = 1, .uops = { { FORMAT_WITH_SPEC, 0, 0 } } }, [FOR_ITER] = { .nuops = 1, .uops = { { _FOR_ITER, 0, 0 } } }, [FOR_ITER_LIST] = { .nuops = 3, .uops = { { _ITER_CHECK_LIST, 0, 0 }, { _ITER_JUMP_LIST, 0, 0 }, { _ITER_NEXT_LIST, 0, 0 } } }, - [FOR_ITER_TUPLE] = { .nuops = 3, .uops = { { _ITER_CHECK_TUPLE, 0, 0 }, { _ITER_JUMP_TUPLE, 0, 0 }, { _ITER_NEXT_TUPLE, 0, 0 } } }, [FOR_ITER_RANGE] = { .nuops = 3, .uops = { { _ITER_CHECK_RANGE, 0, 0 }, { _ITER_JUMP_RANGE, 0, 0 }, { _ITER_NEXT_RANGE, 
0, 0 } } }, - [BEFORE_ASYNC_WITH] = { .nuops = 1, .uops = { { BEFORE_ASYNC_WITH, 0, 0 } } }, - [BEFORE_WITH] = { .nuops = 1, .uops = { { BEFORE_WITH, 0, 0 } } }, - [WITH_EXCEPT_START] = { .nuops = 1, .uops = { { WITH_EXCEPT_START, 0, 0 } } }, - [PUSH_EXC_INFO] = { .nuops = 1, .uops = { { PUSH_EXC_INFO, 0, 0 } } }, - [LOAD_ATTR_METHOD_WITH_VALUES] = { .nuops = 4, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT, 0, 0 }, { _GUARD_KEYS_VERSION, 2, 3 }, { _LOAD_ATTR_METHOD_WITH_VALUES, 4, 5 } } }, + [FOR_ITER_TUPLE] = { .nuops = 3, .uops = { { _ITER_CHECK_TUPLE, 0, 0 }, { _ITER_JUMP_TUPLE, 0, 0 }, { _ITER_NEXT_TUPLE, 0, 0 } } }, + [GET_AITER] = { .nuops = 1, .uops = { { GET_AITER, 0, 0 } } }, + [GET_ANEXT] = { .nuops = 1, .uops = { { GET_ANEXT, 0, 0 } } }, + [GET_AWAITABLE] = { .nuops = 1, .uops = { { GET_AWAITABLE, 0, 0 } } }, + [GET_ITER] = { .nuops = 1, .uops = { { GET_ITER, 0, 0 } } }, + [GET_LEN] = { .nuops = 1, .uops = { { GET_LEN, 0, 0 } } }, + [GET_YIELD_FROM_ITER] = { .nuops = 1, .uops = { { GET_YIELD_FROM_ITER, 0, 0 } } }, + [IS_OP] = { .nuops = 1, .uops = { { IS_OP, 0, 0 } } }, + [LIST_APPEND] = { .nuops = 1, .uops = { { LIST_APPEND, 0, 0 } } }, + [LIST_EXTEND] = { .nuops = 1, .uops = { { LIST_EXTEND, 0, 0 } } }, + [LOAD_ASSERTION_ERROR] = { .nuops = 1, .uops = { { LOAD_ASSERTION_ERROR, 0, 0 } } }, + [LOAD_ATTR] = { .nuops = 1, .uops = { { _LOAD_ATTR, 0, 0 } } }, + [LOAD_ATTR_CLASS] = { .nuops = 2, .uops = { { _CHECK_ATTR_CLASS, 2, 1 }, { _LOAD_ATTR_CLASS, 4, 5 } } }, + [LOAD_ATTR_INSTANCE_VALUE] = { .nuops = 3, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _CHECK_MANAGED_OBJECT_HAS_VALUES, 0, 0 }, { _LOAD_ATTR_INSTANCE_VALUE, 1, 3 } } }, + [LOAD_ATTR_METHOD_LAZY_DICT] = { .nuops = 3, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _CHECK_ATTR_METHOD_LAZY_DICT, 0, 0 }, { _LOAD_ATTR_METHOD_LAZY_DICT, 4, 5 } } }, [LOAD_ATTR_METHOD_NO_DICT] = { .nuops = 2, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _LOAD_ATTR_METHOD_NO_DICT, 4, 5 } } }, - [LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = { .nuops = 4, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT, 0, 0 }, { _GUARD_KEYS_VERSION, 2, 3 }, { _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES, 4, 5 } } }, + [LOAD_ATTR_METHOD_WITH_VALUES] = { .nuops = 4, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT, 0, 0 }, { _GUARD_KEYS_VERSION, 2, 3 }, { _LOAD_ATTR_METHOD_WITH_VALUES, 4, 5 } } }, + [LOAD_ATTR_MODULE] = { .nuops = 2, .uops = { { _CHECK_ATTR_MODULE, 2, 1 }, { _LOAD_ATTR_MODULE, 1, 3 } } }, [LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = { .nuops = 2, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _LOAD_ATTR_NONDESCRIPTOR_NO_DICT, 4, 5 } } }, - [LOAD_ATTR_METHOD_LAZY_DICT] = { .nuops = 3, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _CHECK_ATTR_METHOD_LAZY_DICT, 0, 0 }, { _LOAD_ATTR_METHOD_LAZY_DICT, 4, 5 } } }, - [CALL_BOUND_METHOD_EXACT_ARGS] = { .nuops = 8, .uops = { { _CHECK_PEP_523, 0, 0 }, { _CHECK_CALL_BOUND_METHOD_EXACT_ARGS, 0, 0 }, { _INIT_CALL_BOUND_METHOD_EXACT_ARGS, 0, 0 }, { _CHECK_FUNCTION_EXACT_ARGS, 2, 1 }, { _CHECK_STACK_SPACE, 0, 0 }, { _INIT_CALL_PY_EXACT_ARGS, 0, 0 }, { _SAVE_RETURN_OFFSET, 7, 3 }, { _PUSH_FRAME, 0, 0 } } }, - [CALL_PY_EXACT_ARGS] = { .nuops = 6, .uops = { { _CHECK_PEP_523, 0, 0 }, { _CHECK_FUNCTION_EXACT_ARGS, 2, 1 }, { _CHECK_STACK_SPACE, 0, 0 }, { _INIT_CALL_PY_EXACT_ARGS, 0, 0 }, { _SAVE_RETURN_OFFSET, 7, 3 }, { _PUSH_FRAME, 0, 0 } } }, - [CALL_TYPE_1] = { .nuops = 1, .uops = { { CALL_TYPE_1, 0, 0 } } }, - [CALL_STR_1] = { .nuops = 1, .uops = { { 
CALL_STR_1, 0, 0 } } }, - [CALL_TUPLE_1] = { .nuops = 1, .uops = { { CALL_TUPLE_1, 0, 0 } } }, - [EXIT_INIT_CHECK] = { .nuops = 1, .uops = { { EXIT_INIT_CHECK, 0, 0 } } }, - [CALL_BUILTIN_CLASS] = { .nuops = 1, .uops = { { CALL_BUILTIN_CLASS, 0, 0 } } }, - [CALL_BUILTIN_O] = { .nuops = 1, .uops = { { CALL_BUILTIN_O, 0, 0 } } }, - [CALL_BUILTIN_FAST] = { .nuops = 1, .uops = { { CALL_BUILTIN_FAST, 0, 0 } } }, - [CALL_BUILTIN_FAST_WITH_KEYWORDS] = { .nuops = 1, .uops = { { CALL_BUILTIN_FAST_WITH_KEYWORDS, 0, 0 } } }, - [CALL_LEN] = { .nuops = 1, .uops = { { CALL_LEN, 0, 0 } } }, - [CALL_ISINSTANCE] = { .nuops = 1, .uops = { { CALL_ISINSTANCE, 0, 0 } } }, - [CALL_METHOD_DESCRIPTOR_O] = { .nuops = 1, .uops = { { CALL_METHOD_DESCRIPTOR_O, 0, 0 } } }, - [CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = { .nuops = 1, .uops = { { CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS, 0, 0 } } }, - [CALL_METHOD_DESCRIPTOR_NOARGS] = { .nuops = 1, .uops = { { CALL_METHOD_DESCRIPTOR_NOARGS, 0, 0 } } }, - [CALL_METHOD_DESCRIPTOR_FAST] = { .nuops = 1, .uops = { { CALL_METHOD_DESCRIPTOR_FAST, 0, 0 } } }, + [LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = { .nuops = 4, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT, 0, 0 }, { _GUARD_KEYS_VERSION, 2, 3 }, { _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES, 4, 5 } } }, + [LOAD_ATTR_SLOT] = { .nuops = 2, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _LOAD_ATTR_SLOT, 1, 3 } } }, + [LOAD_ATTR_WITH_HINT] = { .nuops = 3, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _CHECK_ATTR_WITH_HINT, 0, 0 }, { _LOAD_ATTR_WITH_HINT, 1, 3 } } }, + [LOAD_BUILD_CLASS] = { .nuops = 1, .uops = { { LOAD_BUILD_CLASS, 0, 0 } } }, + [LOAD_CONST] = { .nuops = 1, .uops = { { LOAD_CONST, 0, 0 } } }, + [LOAD_DEREF] = { .nuops = 1, .uops = { { LOAD_DEREF, 0, 0 } } }, + [LOAD_FAST] = { .nuops = 1, .uops = { { LOAD_FAST, 0, 0 } } }, + [LOAD_FAST_AND_CLEAR] = { .nuops = 1, .uops = { { LOAD_FAST_AND_CLEAR, 0, 0 } } }, + [LOAD_FAST_CHECK] = { .nuops = 1, .uops = { { LOAD_FAST_CHECK, 0, 0 } } }, + [LOAD_FAST_LOAD_FAST] = { .nuops = 2, .uops = { { LOAD_FAST, 5, 0 }, { LOAD_FAST, 6, 0 } } }, + [LOAD_FROM_DICT_OR_DEREF] = { .nuops = 1, .uops = { { LOAD_FROM_DICT_OR_DEREF, 0, 0 } } }, + [LOAD_FROM_DICT_OR_GLOBALS] = { .nuops = 1, .uops = { { LOAD_FROM_DICT_OR_GLOBALS, 0, 0 } } }, + [LOAD_GLOBAL] = { .nuops = 1, .uops = { { _LOAD_GLOBAL, 0, 0 } } }, + [LOAD_GLOBAL_BUILTIN] = { .nuops = 3, .uops = { { _GUARD_GLOBALS_VERSION, 1, 1 }, { _GUARD_BUILTINS_VERSION, 1, 2 }, { _LOAD_GLOBAL_BUILTINS, 1, 3 } } }, + [LOAD_GLOBAL_MODULE] = { .nuops = 2, .uops = { { _GUARD_GLOBALS_VERSION, 1, 1 }, { _LOAD_GLOBAL_MODULE, 1, 3 } } }, + [LOAD_LOCALS] = { .nuops = 1, .uops = { { LOAD_LOCALS, 0, 0 } } }, + [LOAD_NAME] = { .nuops = 1, .uops = { { LOAD_NAME, 0, 0 } } }, + [LOAD_SUPER_ATTR_ATTR] = { .nuops = 1, .uops = { { LOAD_SUPER_ATTR_ATTR, 0, 0 } } }, + [LOAD_SUPER_ATTR_METHOD] = { .nuops = 1, .uops = { { LOAD_SUPER_ATTR_METHOD, 0, 0 } } }, + [MAKE_CELL] = { .nuops = 1, .uops = { { MAKE_CELL, 0, 0 } } }, [MAKE_FUNCTION] = { .nuops = 1, .uops = { { MAKE_FUNCTION, 0, 0 } } }, + [MAP_ADD] = { .nuops = 1, .uops = { { MAP_ADD, 0, 0 } } }, + [MATCH_CLASS] = { .nuops = 1, .uops = { { MATCH_CLASS, 0, 0 } } }, + [MATCH_KEYS] = { .nuops = 1, .uops = { { MATCH_KEYS, 0, 0 } } }, + [MATCH_MAPPING] = { .nuops = 1, .uops = { { MATCH_MAPPING, 0, 0 } } }, + [MATCH_SEQUENCE] = { .nuops = 1, .uops = { { MATCH_SEQUENCE, 0, 0 } } }, + [NOP] = { .nuops = 1, .uops = { { NOP, 0, 0 } } }, + [POP_EXCEPT] = { .nuops = 1, .uops = { { POP_EXCEPT, 0, 0 } 
} }, + [POP_JUMP_IF_FALSE] = { .nuops = 1, .uops = { { _POP_JUMP_IF_FALSE, 0, 0 } } }, + [POP_JUMP_IF_NONE] = { .nuops = 2, .uops = { { _IS_NONE, 0, 0 }, { _POP_JUMP_IF_TRUE, 0, 0 } } }, + [POP_JUMP_IF_NOT_NONE] = { .nuops = 2, .uops = { { _IS_NONE, 0, 0 }, { _POP_JUMP_IF_FALSE, 0, 0 } } }, + [POP_JUMP_IF_TRUE] = { .nuops = 1, .uops = { { _POP_JUMP_IF_TRUE, 0, 0 } } }, + [POP_TOP] = { .nuops = 1, .uops = { { POP_TOP, 0, 0 } } }, + [PUSH_EXC_INFO] = { .nuops = 1, .uops = { { PUSH_EXC_INFO, 0, 0 } } }, + [PUSH_NULL] = { .nuops = 1, .uops = { { PUSH_NULL, 0, 0 } } }, + [RESUME_CHECK] = { .nuops = 1, .uops = { { RESUME_CHECK, 0, 0 } } }, + [RETURN_CONST] = { .nuops = 2, .uops = { { LOAD_CONST, 0, 0 }, { _POP_FRAME, 0, 0 } } }, + [RETURN_VALUE] = { .nuops = 1, .uops = { { _POP_FRAME, 0, 0 } } }, + [SETUP_ANNOTATIONS] = { .nuops = 1, .uops = { { SETUP_ANNOTATIONS, 0, 0 } } }, + [SET_ADD] = { .nuops = 1, .uops = { { SET_ADD, 0, 0 } } }, [SET_FUNCTION_ATTRIBUTE] = { .nuops = 1, .uops = { { SET_FUNCTION_ATTRIBUTE, 0, 0 } } }, - [BUILD_SLICE] = { .nuops = 1, .uops = { { BUILD_SLICE, 0, 0 } } }, - [CONVERT_VALUE] = { .nuops = 1, .uops = { { CONVERT_VALUE, 0, 0 } } }, - [FORMAT_SIMPLE] = { .nuops = 1, .uops = { { FORMAT_SIMPLE, 0, 0 } } }, - [FORMAT_WITH_SPEC] = { .nuops = 1, .uops = { { FORMAT_WITH_SPEC, 0, 0 } } }, - [COPY] = { .nuops = 1, .uops = { { COPY, 0, 0 } } }, - [BINARY_OP] = { .nuops = 1, .uops = { { _BINARY_OP, 0, 0 } } }, + [SET_UPDATE] = { .nuops = 1, .uops = { { SET_UPDATE, 0, 0 } } }, + [STORE_ATTR] = { .nuops = 1, .uops = { { _STORE_ATTR, 0, 0 } } }, + [STORE_ATTR_INSTANCE_VALUE] = { .nuops = 3, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _GUARD_DORV_VALUES, 0, 0 }, { _STORE_ATTR_INSTANCE_VALUE, 1, 3 } } }, + [STORE_ATTR_SLOT] = { .nuops = 2, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _STORE_ATTR_SLOT, 1, 3 } } }, + [STORE_DEREF] = { .nuops = 1, .uops = { { STORE_DEREF, 0, 0 } } }, + [STORE_FAST] = { .nuops = 1, .uops = { { STORE_FAST, 0, 0 } } }, + [STORE_FAST_LOAD_FAST] = { .nuops = 2, .uops = { { STORE_FAST, 5, 0 }, { LOAD_FAST, 6, 0 } } }, + [STORE_FAST_STORE_FAST] = { .nuops = 2, .uops = { { STORE_FAST, 5, 0 }, { STORE_FAST, 6, 0 } } }, + [STORE_GLOBAL] = { .nuops = 1, .uops = { { STORE_GLOBAL, 0, 0 } } }, + [STORE_NAME] = { .nuops = 1, .uops = { { STORE_NAME, 0, 0 } } }, + [STORE_SLICE] = { .nuops = 1, .uops = { { STORE_SLICE, 0, 0 } } }, + [STORE_SUBSCR] = { .nuops = 1, .uops = { { _STORE_SUBSCR, 0, 0 } } }, + [STORE_SUBSCR_DICT] = { .nuops = 1, .uops = { { STORE_SUBSCR_DICT, 0, 0 } } }, + [STORE_SUBSCR_LIST_INT] = { .nuops = 1, .uops = { { STORE_SUBSCR_LIST_INT, 0, 0 } } }, [SWAP] = { .nuops = 1, .uops = { { SWAP, 0, 0 } } }, + [TO_BOOL] = { .nuops = 1, .uops = { { _TO_BOOL, 0, 0 } } }, + [TO_BOOL_ALWAYS_TRUE] = { .nuops = 1, .uops = { { TO_BOOL_ALWAYS_TRUE, 2, 1 } } }, + [TO_BOOL_BOOL] = { .nuops = 1, .uops = { { TO_BOOL_BOOL, 0, 0 } } }, + [TO_BOOL_INT] = { .nuops = 1, .uops = { { TO_BOOL_INT, 0, 0 } } }, + [TO_BOOL_LIST] = { .nuops = 1, .uops = { { TO_BOOL_LIST, 0, 0 } } }, + [TO_BOOL_NONE] = { .nuops = 1, .uops = { { TO_BOOL_NONE, 0, 0 } } }, + [TO_BOOL_STR] = { .nuops = 1, .uops = { { TO_BOOL_STR, 0, 0 } } }, + [UNARY_INVERT] = { .nuops = 1, .uops = { { UNARY_INVERT, 0, 0 } } }, + [UNARY_NEGATIVE] = { .nuops = 1, .uops = { { UNARY_NEGATIVE, 0, 0 } } }, + [UNARY_NOT] = { .nuops = 1, .uops = { { UNARY_NOT, 0, 0 } } }, + [UNPACK_EX] = { .nuops = 1, .uops = { { UNPACK_EX, 0, 0 } } }, + [UNPACK_SEQUENCE] = { .nuops = 1, .uops = { { _UNPACK_SEQUENCE, 0, 0 } } }, + 
[UNPACK_SEQUENCE_LIST] = { .nuops = 1, .uops = { { UNPACK_SEQUENCE_LIST, 0, 0 } } }, + [UNPACK_SEQUENCE_TUPLE] = { .nuops = 1, .uops = { { UNPACK_SEQUENCE_TUPLE, 0, 0 } } }, + [UNPACK_SEQUENCE_TWO_TUPLE] = { .nuops = 1, .uops = { { UNPACK_SEQUENCE_TWO_TUPLE, 0, 0 } } }, + [WITH_EXCEPT_START] = { .nuops = 1, .uops = { { WITH_EXCEPT_START, 0, 0 } } }, }; #endif // NEED_OPCODE_METADATA @@ -1863,153 +1863,124 @@ extern const char * const _PyOpcode_uop_name[OPCODE_UOP_NAME_SIZE]; const char * const _PyOpcode_uop_name[OPCODE_UOP_NAME_SIZE] = { [_EXIT_TRACE] = "_EXIT_TRACE", [_SET_IP] = "_SET_IP", - [_SPECIALIZE_TO_BOOL] = "_SPECIALIZE_TO_BOOL", - [_TO_BOOL] = "_TO_BOOL", - [_GUARD_BOTH_INT] = "_GUARD_BOTH_INT", - [_BINARY_OP_MULTIPLY_INT] = "_BINARY_OP_MULTIPLY_INT", - [_BINARY_OP_ADD_INT] = "_BINARY_OP_ADD_INT", - [_BINARY_OP_SUBTRACT_INT] = "_BINARY_OP_SUBTRACT_INT", - [_GUARD_BOTH_FLOAT] = "_GUARD_BOTH_FLOAT", - [_BINARY_OP_MULTIPLY_FLOAT] = "_BINARY_OP_MULTIPLY_FLOAT", + [_BINARY_OP] = "_BINARY_OP", [_BINARY_OP_ADD_FLOAT] = "_BINARY_OP_ADD_FLOAT", - [_BINARY_OP_SUBTRACT_FLOAT] = "_BINARY_OP_SUBTRACT_FLOAT", - [_GUARD_BOTH_UNICODE] = "_GUARD_BOTH_UNICODE", + [_BINARY_OP_ADD_INT] = "_BINARY_OP_ADD_INT", [_BINARY_OP_ADD_UNICODE] = "_BINARY_OP_ADD_UNICODE", [_BINARY_OP_INPLACE_ADD_UNICODE] = "_BINARY_OP_INPLACE_ADD_UNICODE", - [_SPECIALIZE_BINARY_SUBSCR] = "_SPECIALIZE_BINARY_SUBSCR", + [_BINARY_OP_MULTIPLY_FLOAT] = "_BINARY_OP_MULTIPLY_FLOAT", + [_BINARY_OP_MULTIPLY_INT] = "_BINARY_OP_MULTIPLY_INT", + [_BINARY_OP_SUBTRACT_FLOAT] = "_BINARY_OP_SUBTRACT_FLOAT", + [_BINARY_OP_SUBTRACT_INT] = "_BINARY_OP_SUBTRACT_INT", [_BINARY_SUBSCR] = "_BINARY_SUBSCR", - [_SPECIALIZE_STORE_SUBSCR] = "_SPECIALIZE_STORE_SUBSCR", - [_STORE_SUBSCR] = "_STORE_SUBSCR", - [_POP_FRAME] = "_POP_FRAME", - [_SPECIALIZE_SEND] = "_SPECIALIZE_SEND", - [_SEND] = "_SEND", - [_SPECIALIZE_UNPACK_SEQUENCE] = "_SPECIALIZE_UNPACK_SEQUENCE", - [_UNPACK_SEQUENCE] = "_UNPACK_SEQUENCE", - [_SPECIALIZE_STORE_ATTR] = "_SPECIALIZE_STORE_ATTR", - [_STORE_ATTR] = "_STORE_ATTR", - [_SPECIALIZE_LOAD_GLOBAL] = "_SPECIALIZE_LOAD_GLOBAL", - [_LOAD_GLOBAL] = "_LOAD_GLOBAL", - [_GUARD_GLOBALS_VERSION] = "_GUARD_GLOBALS_VERSION", - [_GUARD_BUILTINS_VERSION] = "_GUARD_BUILTINS_VERSION", - [_LOAD_GLOBAL_MODULE] = "_LOAD_GLOBAL_MODULE", - [_LOAD_GLOBAL_BUILTINS] = "_LOAD_GLOBAL_BUILTINS", - [_SPECIALIZE_LOAD_SUPER_ATTR] = "_SPECIALIZE_LOAD_SUPER_ATTR", - [_LOAD_SUPER_ATTR] = "_LOAD_SUPER_ATTR", - [_SPECIALIZE_LOAD_ATTR] = "_SPECIALIZE_LOAD_ATTR", - [_LOAD_ATTR] = "_LOAD_ATTR", - [_GUARD_TYPE_VERSION] = "_GUARD_TYPE_VERSION", - [_CHECK_MANAGED_OBJECT_HAS_VALUES] = "_CHECK_MANAGED_OBJECT_HAS_VALUES", - [_LOAD_ATTR_INSTANCE_VALUE] = "_LOAD_ATTR_INSTANCE_VALUE", + [_CALL] = "_CALL", + [_CHECK_ATTR_CLASS] = "_CHECK_ATTR_CLASS", + [_CHECK_ATTR_METHOD_LAZY_DICT] = "_CHECK_ATTR_METHOD_LAZY_DICT", [_CHECK_ATTR_MODULE] = "_CHECK_ATTR_MODULE", - [_LOAD_ATTR_MODULE] = "_LOAD_ATTR_MODULE", [_CHECK_ATTR_WITH_HINT] = "_CHECK_ATTR_WITH_HINT", - [_LOAD_ATTR_WITH_HINT] = "_LOAD_ATTR_WITH_HINT", - [_LOAD_ATTR_SLOT] = "_LOAD_ATTR_SLOT", - [_CHECK_ATTR_CLASS] = "_CHECK_ATTR_CLASS", - [_LOAD_ATTR_CLASS] = "_LOAD_ATTR_CLASS", - [_GUARD_DORV_VALUES] = "_GUARD_DORV_VALUES", - [_STORE_ATTR_INSTANCE_VALUE] = "_STORE_ATTR_INSTANCE_VALUE", - [_STORE_ATTR_SLOT] = "_STORE_ATTR_SLOT", - [_SPECIALIZE_COMPARE_OP] = "_SPECIALIZE_COMPARE_OP", + [_CHECK_CALL_BOUND_METHOD_EXACT_ARGS] = "_CHECK_CALL_BOUND_METHOD_EXACT_ARGS", + [_CHECK_FUNCTION_EXACT_ARGS] = "_CHECK_FUNCTION_EXACT_ARGS", + 
[_CHECK_MANAGED_OBJECT_HAS_VALUES] = "_CHECK_MANAGED_OBJECT_HAS_VALUES", + [_CHECK_PEP_523] = "_CHECK_PEP_523", + [_CHECK_STACK_SPACE] = "_CHECK_STACK_SPACE", + [_CHECK_VALIDITY] = "_CHECK_VALIDITY", [_COMPARE_OP] = "_COMPARE_OP", - [_POP_JUMP_IF_FALSE] = "_POP_JUMP_IF_FALSE", - [_POP_JUMP_IF_TRUE] = "_POP_JUMP_IF_TRUE", - [_IS_NONE] = "_IS_NONE", - [_SPECIALIZE_FOR_ITER] = "_SPECIALIZE_FOR_ITER", [_FOR_ITER] = "_FOR_ITER", [_FOR_ITER_TIER_TWO] = "_FOR_ITER_TIER_TWO", - [_ITER_CHECK_LIST] = "_ITER_CHECK_LIST", - [_ITER_JUMP_LIST] = "_ITER_JUMP_LIST", + [_GUARD_BOTH_FLOAT] = "_GUARD_BOTH_FLOAT", + [_GUARD_BOTH_INT] = "_GUARD_BOTH_INT", + [_GUARD_BOTH_UNICODE] = "_GUARD_BOTH_UNICODE", + [_GUARD_BUILTINS_VERSION] = "_GUARD_BUILTINS_VERSION", + [_GUARD_DORV_VALUES] = "_GUARD_DORV_VALUES", + [_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT] = "_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT", + [_GUARD_GLOBALS_VERSION] = "_GUARD_GLOBALS_VERSION", + [_GUARD_IS_FALSE_POP] = "_GUARD_IS_FALSE_POP", + [_GUARD_IS_NONE_POP] = "_GUARD_IS_NONE_POP", + [_GUARD_IS_NOT_NONE_POP] = "_GUARD_IS_NOT_NONE_POP", + [_GUARD_IS_TRUE_POP] = "_GUARD_IS_TRUE_POP", + [_GUARD_KEYS_VERSION] = "_GUARD_KEYS_VERSION", [_GUARD_NOT_EXHAUSTED_LIST] = "_GUARD_NOT_EXHAUSTED_LIST", - [_ITER_NEXT_LIST] = "_ITER_NEXT_LIST", - [_ITER_CHECK_TUPLE] = "_ITER_CHECK_TUPLE", - [_ITER_JUMP_TUPLE] = "_ITER_JUMP_TUPLE", + [_GUARD_NOT_EXHAUSTED_RANGE] = "_GUARD_NOT_EXHAUSTED_RANGE", [_GUARD_NOT_EXHAUSTED_TUPLE] = "_GUARD_NOT_EXHAUSTED_TUPLE", - [_ITER_NEXT_TUPLE] = "_ITER_NEXT_TUPLE", + [_GUARD_TYPE_VERSION] = "_GUARD_TYPE_VERSION", + [_INIT_CALL_BOUND_METHOD_EXACT_ARGS] = "_INIT_CALL_BOUND_METHOD_EXACT_ARGS", + [_INIT_CALL_PY_EXACT_ARGS] = "_INIT_CALL_PY_EXACT_ARGS", + [_INSERT] = "_INSERT", + [_IS_NONE] = "_IS_NONE", + [_ITER_CHECK_LIST] = "_ITER_CHECK_LIST", [_ITER_CHECK_RANGE] = "_ITER_CHECK_RANGE", + [_ITER_CHECK_TUPLE] = "_ITER_CHECK_TUPLE", + [_ITER_JUMP_LIST] = "_ITER_JUMP_LIST", [_ITER_JUMP_RANGE] = "_ITER_JUMP_RANGE", - [_GUARD_NOT_EXHAUSTED_RANGE] = "_GUARD_NOT_EXHAUSTED_RANGE", + [_ITER_JUMP_TUPLE] = "_ITER_JUMP_TUPLE", + [_ITER_NEXT_LIST] = "_ITER_NEXT_LIST", [_ITER_NEXT_RANGE] = "_ITER_NEXT_RANGE", - [_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT] = "_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT", - [_GUARD_KEYS_VERSION] = "_GUARD_KEYS_VERSION", - [_LOAD_ATTR_METHOD_WITH_VALUES] = "_LOAD_ATTR_METHOD_WITH_VALUES", + [_ITER_NEXT_TUPLE] = "_ITER_NEXT_TUPLE", + [_JUMP_TO_TOP] = "_JUMP_TO_TOP", + [_LOAD_ATTR] = "_LOAD_ATTR", + [_LOAD_ATTR_CLASS] = "_LOAD_ATTR_CLASS", + [_LOAD_ATTR_INSTANCE_VALUE] = "_LOAD_ATTR_INSTANCE_VALUE", + [_LOAD_ATTR_METHOD_LAZY_DICT] = "_LOAD_ATTR_METHOD_LAZY_DICT", [_LOAD_ATTR_METHOD_NO_DICT] = "_LOAD_ATTR_METHOD_NO_DICT", - [_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = "_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES", + [_LOAD_ATTR_METHOD_WITH_VALUES] = "_LOAD_ATTR_METHOD_WITH_VALUES", + [_LOAD_ATTR_MODULE] = "_LOAD_ATTR_MODULE", [_LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = "_LOAD_ATTR_NONDESCRIPTOR_NO_DICT", - [_CHECK_ATTR_METHOD_LAZY_DICT] = "_CHECK_ATTR_METHOD_LAZY_DICT", - [_LOAD_ATTR_METHOD_LAZY_DICT] = "_LOAD_ATTR_METHOD_LAZY_DICT", - [_SPECIALIZE_CALL] = "_SPECIALIZE_CALL", - [_CALL] = "_CALL", - [_CHECK_CALL_BOUND_METHOD_EXACT_ARGS] = "_CHECK_CALL_BOUND_METHOD_EXACT_ARGS", - [_INIT_CALL_BOUND_METHOD_EXACT_ARGS] = "_INIT_CALL_BOUND_METHOD_EXACT_ARGS", - [_CHECK_PEP_523] = "_CHECK_PEP_523", - [_CHECK_FUNCTION_EXACT_ARGS] = "_CHECK_FUNCTION_EXACT_ARGS", - [_CHECK_STACK_SPACE] = "_CHECK_STACK_SPACE", - [_INIT_CALL_PY_EXACT_ARGS] = 
"_INIT_CALL_PY_EXACT_ARGS", + [_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = "_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES", + [_LOAD_ATTR_SLOT] = "_LOAD_ATTR_SLOT", + [_LOAD_ATTR_WITH_HINT] = "_LOAD_ATTR_WITH_HINT", + [_LOAD_GLOBAL] = "_LOAD_GLOBAL", + [_LOAD_GLOBAL_BUILTINS] = "_LOAD_GLOBAL_BUILTINS", + [_LOAD_GLOBAL_MODULE] = "_LOAD_GLOBAL_MODULE", + [_LOAD_SUPER_ATTR] = "_LOAD_SUPER_ATTR", + [_POP_FRAME] = "_POP_FRAME", + [_POP_JUMP_IF_FALSE] = "_POP_JUMP_IF_FALSE", + [_POP_JUMP_IF_TRUE] = "_POP_JUMP_IF_TRUE", [_PUSH_FRAME] = "_PUSH_FRAME", - [_SPECIALIZE_BINARY_OP] = "_SPECIALIZE_BINARY_OP", - [_BINARY_OP] = "_BINARY_OP", - [_GUARD_IS_TRUE_POP] = "_GUARD_IS_TRUE_POP", - [_GUARD_IS_FALSE_POP] = "_GUARD_IS_FALSE_POP", - [_GUARD_IS_NONE_POP] = "_GUARD_IS_NONE_POP", - [_GUARD_IS_NOT_NONE_POP] = "_GUARD_IS_NOT_NONE_POP", - [_JUMP_TO_TOP] = "_JUMP_TO_TOP", [_SAVE_RETURN_OFFSET] = "_SAVE_RETURN_OFFSET", - [_INSERT] = "_INSERT", - [_CHECK_VALIDITY] = "_CHECK_VALIDITY", -}; -#endif // NEED_OPCODE_METADATA - -extern const char *const _PyOpcode_OpName[268]; -#ifdef NEED_OPCODE_METADATA -const char *const _PyOpcode_OpName[268] = { - [CACHE] = "CACHE", - [RESERVED] = "RESERVED", - [RESUME] = "RESUME", - [BEFORE_ASYNC_WITH] = "BEFORE_ASYNC_WITH", - [BEFORE_WITH] = "BEFORE_WITH", - [BINARY_OP_INPLACE_ADD_UNICODE] = "BINARY_OP_INPLACE_ADD_UNICODE", - [BINARY_SLICE] = "BINARY_SLICE", - [BINARY_SUBSCR] = "BINARY_SUBSCR", - [CHECK_EG_MATCH] = "CHECK_EG_MATCH", - [CHECK_EXC_MATCH] = "CHECK_EXC_MATCH", - [CLEANUP_THROW] = "CLEANUP_THROW", - [DELETE_SUBSCR] = "DELETE_SUBSCR", - [END_ASYNC_FOR] = "END_ASYNC_FOR", - [END_FOR] = "END_FOR", - [END_SEND] = "END_SEND", - [EXIT_INIT_CHECK] = "EXIT_INIT_CHECK", - [FORMAT_SIMPLE] = "FORMAT_SIMPLE", - [FORMAT_WITH_SPEC] = "FORMAT_WITH_SPEC", - [GET_AITER] = "GET_AITER", - [GET_ANEXT] = "GET_ANEXT", - [GET_ITER] = "GET_ITER", - [GET_LEN] = "GET_LEN", - [GET_YIELD_FROM_ITER] = "GET_YIELD_FROM_ITER", - [INTERPRETER_EXIT] = "INTERPRETER_EXIT", - [LOAD_ASSERTION_ERROR] = "LOAD_ASSERTION_ERROR", - [LOAD_BUILD_CLASS] = "LOAD_BUILD_CLASS", - [LOAD_LOCALS] = "LOAD_LOCALS", - [MAKE_FUNCTION] = "MAKE_FUNCTION", - [MATCH_KEYS] = "MATCH_KEYS", - [MATCH_MAPPING] = "MATCH_MAPPING", - [MATCH_SEQUENCE] = "MATCH_SEQUENCE", - [NOP] = "NOP", - [POP_EXCEPT] = "POP_EXCEPT", - [POP_TOP] = "POP_TOP", - [PUSH_EXC_INFO] = "PUSH_EXC_INFO", - [PUSH_NULL] = "PUSH_NULL", - [RETURN_GENERATOR] = "RETURN_GENERATOR", - [RETURN_VALUE] = "RETURN_VALUE", - [SETUP_ANNOTATIONS] = "SETUP_ANNOTATIONS", - [STORE_SLICE] = "STORE_SLICE", - [STORE_SUBSCR] = "STORE_SUBSCR", - [TO_BOOL] = "TO_BOOL", - [UNARY_INVERT] = "UNARY_INVERT", - [UNARY_NEGATIVE] = "UNARY_NEGATIVE", - [UNARY_NOT] = "UNARY_NOT", - [WITH_EXCEPT_START] = "WITH_EXCEPT_START", + [_SEND] = "_SEND", + [_SPECIALIZE_BINARY_OP] = "_SPECIALIZE_BINARY_OP", + [_SPECIALIZE_BINARY_SUBSCR] = "_SPECIALIZE_BINARY_SUBSCR", + [_SPECIALIZE_CALL] = "_SPECIALIZE_CALL", + [_SPECIALIZE_COMPARE_OP] = "_SPECIALIZE_COMPARE_OP", + [_SPECIALIZE_FOR_ITER] = "_SPECIALIZE_FOR_ITER", + [_SPECIALIZE_LOAD_ATTR] = "_SPECIALIZE_LOAD_ATTR", + [_SPECIALIZE_LOAD_GLOBAL] = "_SPECIALIZE_LOAD_GLOBAL", + [_SPECIALIZE_LOAD_SUPER_ATTR] = "_SPECIALIZE_LOAD_SUPER_ATTR", + [_SPECIALIZE_SEND] = "_SPECIALIZE_SEND", + [_SPECIALIZE_STORE_ATTR] = "_SPECIALIZE_STORE_ATTR", + [_SPECIALIZE_STORE_SUBSCR] = "_SPECIALIZE_STORE_SUBSCR", + [_SPECIALIZE_TO_BOOL] = "_SPECIALIZE_TO_BOOL", + [_SPECIALIZE_UNPACK_SEQUENCE] = "_SPECIALIZE_UNPACK_SEQUENCE", + [_STORE_ATTR] = "_STORE_ATTR", + [_STORE_ATTR_INSTANCE_VALUE] = 
"_STORE_ATTR_INSTANCE_VALUE", + [_STORE_ATTR_SLOT] = "_STORE_ATTR_SLOT", + [_STORE_SUBSCR] = "_STORE_SUBSCR", + [_TO_BOOL] = "_TO_BOOL", + [_UNPACK_SEQUENCE] = "_UNPACK_SEQUENCE", +}; +#endif // NEED_OPCODE_METADATA + +extern const char *const _PyOpcode_OpName[268]; +#ifdef NEED_OPCODE_METADATA +const char *const _PyOpcode_OpName[268] = { + [BEFORE_ASYNC_WITH] = "BEFORE_ASYNC_WITH", + [BEFORE_WITH] = "BEFORE_WITH", [BINARY_OP] = "BINARY_OP", + [BINARY_OP_ADD_FLOAT] = "BINARY_OP_ADD_FLOAT", + [BINARY_OP_ADD_INT] = "BINARY_OP_ADD_INT", + [BINARY_OP_ADD_UNICODE] = "BINARY_OP_ADD_UNICODE", + [BINARY_OP_INPLACE_ADD_UNICODE] = "BINARY_OP_INPLACE_ADD_UNICODE", + [BINARY_OP_MULTIPLY_FLOAT] = "BINARY_OP_MULTIPLY_FLOAT", + [BINARY_OP_MULTIPLY_INT] = "BINARY_OP_MULTIPLY_INT", + [BINARY_OP_SUBTRACT_FLOAT] = "BINARY_OP_SUBTRACT_FLOAT", + [BINARY_OP_SUBTRACT_INT] = "BINARY_OP_SUBTRACT_INT", + [BINARY_SLICE] = "BINARY_SLICE", + [BINARY_SUBSCR] = "BINARY_SUBSCR", + [BINARY_SUBSCR_DICT] = "BINARY_SUBSCR_DICT", + [BINARY_SUBSCR_GETITEM] = "BINARY_SUBSCR_GETITEM", + [BINARY_SUBSCR_LIST_INT] = "BINARY_SUBSCR_LIST_INT", + [BINARY_SUBSCR_STR_INT] = "BINARY_SUBSCR_STR_INT", + [BINARY_SUBSCR_TUPLE_INT] = "BINARY_SUBSCR_TUPLE_INT", [BUILD_CONST_KEY_MAP] = "BUILD_CONST_KEY_MAP", [BUILD_LIST] = "BUILD_LIST", [BUILD_MAP] = "BUILD_MAP", @@ -2017,12 +1988,37 @@ const char *const _PyOpcode_OpName[268] = { [BUILD_SLICE] = "BUILD_SLICE", [BUILD_STRING] = "BUILD_STRING", [BUILD_TUPLE] = "BUILD_TUPLE", + [CACHE] = "CACHE", [CALL] = "CALL", + [CALL_ALLOC_AND_ENTER_INIT] = "CALL_ALLOC_AND_ENTER_INIT", + [CALL_BOUND_METHOD_EXACT_ARGS] = "CALL_BOUND_METHOD_EXACT_ARGS", + [CALL_BUILTIN_CLASS] = "CALL_BUILTIN_CLASS", + [CALL_BUILTIN_FAST] = "CALL_BUILTIN_FAST", + [CALL_BUILTIN_FAST_WITH_KEYWORDS] = "CALL_BUILTIN_FAST_WITH_KEYWORDS", + [CALL_BUILTIN_O] = "CALL_BUILTIN_O", [CALL_FUNCTION_EX] = "CALL_FUNCTION_EX", [CALL_INTRINSIC_1] = "CALL_INTRINSIC_1", [CALL_INTRINSIC_2] = "CALL_INTRINSIC_2", + [CALL_ISINSTANCE] = "CALL_ISINSTANCE", [CALL_KW] = "CALL_KW", + [CALL_LEN] = "CALL_LEN", + [CALL_LIST_APPEND] = "CALL_LIST_APPEND", + [CALL_METHOD_DESCRIPTOR_FAST] = "CALL_METHOD_DESCRIPTOR_FAST", + [CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = "CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS", + [CALL_METHOD_DESCRIPTOR_NOARGS] = "CALL_METHOD_DESCRIPTOR_NOARGS", + [CALL_METHOD_DESCRIPTOR_O] = "CALL_METHOD_DESCRIPTOR_O", + [CALL_PY_EXACT_ARGS] = "CALL_PY_EXACT_ARGS", + [CALL_PY_WITH_DEFAULTS] = "CALL_PY_WITH_DEFAULTS", + [CALL_STR_1] = "CALL_STR_1", + [CALL_TUPLE_1] = "CALL_TUPLE_1", + [CALL_TYPE_1] = "CALL_TYPE_1", + [CHECK_EG_MATCH] = "CHECK_EG_MATCH", + [CHECK_EXC_MATCH] = "CHECK_EXC_MATCH", + [CLEANUP_THROW] = "CLEANUP_THROW", [COMPARE_OP] = "COMPARE_OP", + [COMPARE_OP_FLOAT] = "COMPARE_OP_FLOAT", + [COMPARE_OP_INT] = "COMPARE_OP_INT", + [COMPARE_OP_STR] = "COMPARE_OP_STR", [CONTAINS_OP] = "CONTAINS_OP", [CONVERT_VALUE] = "CONVERT_VALUE", [COPY] = "COPY", @@ -2032,21 +2028,74 @@ const char *const _PyOpcode_OpName[268] = { [DELETE_FAST] = "DELETE_FAST", [DELETE_GLOBAL] = "DELETE_GLOBAL", [DELETE_NAME] = "DELETE_NAME", + [DELETE_SUBSCR] = "DELETE_SUBSCR", [DICT_MERGE] = "DICT_MERGE", [DICT_UPDATE] = "DICT_UPDATE", + [END_ASYNC_FOR] = "END_ASYNC_FOR", + [END_FOR] = "END_FOR", + [END_SEND] = "END_SEND", [ENTER_EXECUTOR] = "ENTER_EXECUTOR", + [EXIT_INIT_CHECK] = "EXIT_INIT_CHECK", [EXTENDED_ARG] = "EXTENDED_ARG", + [FORMAT_SIMPLE] = "FORMAT_SIMPLE", + [FORMAT_WITH_SPEC] = "FORMAT_WITH_SPEC", [FOR_ITER] = "FOR_ITER", + [FOR_ITER_GEN] = 
"FOR_ITER_GEN", + [FOR_ITER_LIST] = "FOR_ITER_LIST", + [FOR_ITER_RANGE] = "FOR_ITER_RANGE", + [FOR_ITER_TUPLE] = "FOR_ITER_TUPLE", + [GET_AITER] = "GET_AITER", + [GET_ANEXT] = "GET_ANEXT", [GET_AWAITABLE] = "GET_AWAITABLE", + [GET_ITER] = "GET_ITER", + [GET_LEN] = "GET_LEN", + [GET_YIELD_FROM_ITER] = "GET_YIELD_FROM_ITER", [IMPORT_FROM] = "IMPORT_FROM", [IMPORT_NAME] = "IMPORT_NAME", + [INSTRUMENTED_CALL] = "INSTRUMENTED_CALL", + [INSTRUMENTED_CALL_FUNCTION_EX] = "INSTRUMENTED_CALL_FUNCTION_EX", + [INSTRUMENTED_CALL_KW] = "INSTRUMENTED_CALL_KW", + [INSTRUMENTED_END_FOR] = "INSTRUMENTED_END_FOR", + [INSTRUMENTED_END_SEND] = "INSTRUMENTED_END_SEND", + [INSTRUMENTED_FOR_ITER] = "INSTRUMENTED_FOR_ITER", + [INSTRUMENTED_INSTRUCTION] = "INSTRUMENTED_INSTRUCTION", + [INSTRUMENTED_JUMP_BACKWARD] = "INSTRUMENTED_JUMP_BACKWARD", + [INSTRUMENTED_JUMP_FORWARD] = "INSTRUMENTED_JUMP_FORWARD", + [INSTRUMENTED_LINE] = "INSTRUMENTED_LINE", + [INSTRUMENTED_LOAD_SUPER_ATTR] = "INSTRUMENTED_LOAD_SUPER_ATTR", + [INSTRUMENTED_POP_JUMP_IF_FALSE] = "INSTRUMENTED_POP_JUMP_IF_FALSE", + [INSTRUMENTED_POP_JUMP_IF_NONE] = "INSTRUMENTED_POP_JUMP_IF_NONE", + [INSTRUMENTED_POP_JUMP_IF_NOT_NONE] = "INSTRUMENTED_POP_JUMP_IF_NOT_NONE", + [INSTRUMENTED_POP_JUMP_IF_TRUE] = "INSTRUMENTED_POP_JUMP_IF_TRUE", + [INSTRUMENTED_RESUME] = "INSTRUMENTED_RESUME", + [INSTRUMENTED_RETURN_CONST] = "INSTRUMENTED_RETURN_CONST", + [INSTRUMENTED_RETURN_VALUE] = "INSTRUMENTED_RETURN_VALUE", + [INSTRUMENTED_YIELD_VALUE] = "INSTRUMENTED_YIELD_VALUE", + [INTERPRETER_EXIT] = "INTERPRETER_EXIT", [IS_OP] = "IS_OP", + [JUMP] = "JUMP", [JUMP_BACKWARD] = "JUMP_BACKWARD", [JUMP_BACKWARD_NO_INTERRUPT] = "JUMP_BACKWARD_NO_INTERRUPT", [JUMP_FORWARD] = "JUMP_FORWARD", + [JUMP_NO_INTERRUPT] = "JUMP_NO_INTERRUPT", [LIST_APPEND] = "LIST_APPEND", [LIST_EXTEND] = "LIST_EXTEND", + [LOAD_ASSERTION_ERROR] = "LOAD_ASSERTION_ERROR", [LOAD_ATTR] = "LOAD_ATTR", + [LOAD_ATTR_CLASS] = "LOAD_ATTR_CLASS", + [LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN] = "LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN", + [LOAD_ATTR_INSTANCE_VALUE] = "LOAD_ATTR_INSTANCE_VALUE", + [LOAD_ATTR_METHOD_LAZY_DICT] = "LOAD_ATTR_METHOD_LAZY_DICT", + [LOAD_ATTR_METHOD_NO_DICT] = "LOAD_ATTR_METHOD_NO_DICT", + [LOAD_ATTR_METHOD_WITH_VALUES] = "LOAD_ATTR_METHOD_WITH_VALUES", + [LOAD_ATTR_MODULE] = "LOAD_ATTR_MODULE", + [LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = "LOAD_ATTR_NONDESCRIPTOR_NO_DICT", + [LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = "LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES", + [LOAD_ATTR_PROPERTY] = "LOAD_ATTR_PROPERTY", + [LOAD_ATTR_SLOT] = "LOAD_ATTR_SLOT", + [LOAD_ATTR_WITH_HINT] = "LOAD_ATTR_WITH_HINT", + [LOAD_BUILD_CLASS] = "LOAD_BUILD_CLASS", + [LOAD_CLOSURE] = "LOAD_CLOSURE", [LOAD_CONST] = "LOAD_CONST", [LOAD_DEREF] = "LOAD_DEREF", [LOAD_FAST] = "LOAD_FAST", @@ -2056,133 +2105,84 @@ const char *const _PyOpcode_OpName[268] = { [LOAD_FROM_DICT_OR_DEREF] = "LOAD_FROM_DICT_OR_DEREF", [LOAD_FROM_DICT_OR_GLOBALS] = "LOAD_FROM_DICT_OR_GLOBALS", [LOAD_GLOBAL] = "LOAD_GLOBAL", + [LOAD_GLOBAL_BUILTIN] = "LOAD_GLOBAL_BUILTIN", + [LOAD_GLOBAL_MODULE] = "LOAD_GLOBAL_MODULE", + [LOAD_LOCALS] = "LOAD_LOCALS", + [LOAD_METHOD] = "LOAD_METHOD", [LOAD_NAME] = "LOAD_NAME", [LOAD_SUPER_ATTR] = "LOAD_SUPER_ATTR", + [LOAD_SUPER_ATTR_ATTR] = "LOAD_SUPER_ATTR_ATTR", + [LOAD_SUPER_ATTR_METHOD] = "LOAD_SUPER_ATTR_METHOD", + [LOAD_SUPER_METHOD] = "LOAD_SUPER_METHOD", + [LOAD_ZERO_SUPER_ATTR] = "LOAD_ZERO_SUPER_ATTR", + [LOAD_ZERO_SUPER_METHOD] = "LOAD_ZERO_SUPER_METHOD", [MAKE_CELL] = "MAKE_CELL", + [MAKE_FUNCTION] = "MAKE_FUNCTION", [MAP_ADD] = 
"MAP_ADD", [MATCH_CLASS] = "MATCH_CLASS", + [MATCH_KEYS] = "MATCH_KEYS", + [MATCH_MAPPING] = "MATCH_MAPPING", + [MATCH_SEQUENCE] = "MATCH_SEQUENCE", + [NOP] = "NOP", + [POP_BLOCK] = "POP_BLOCK", + [POP_EXCEPT] = "POP_EXCEPT", [POP_JUMP_IF_FALSE] = "POP_JUMP_IF_FALSE", [POP_JUMP_IF_NONE] = "POP_JUMP_IF_NONE", [POP_JUMP_IF_NOT_NONE] = "POP_JUMP_IF_NOT_NONE", [POP_JUMP_IF_TRUE] = "POP_JUMP_IF_TRUE", + [POP_TOP] = "POP_TOP", + [PUSH_EXC_INFO] = "PUSH_EXC_INFO", + [PUSH_NULL] = "PUSH_NULL", [RAISE_VARARGS] = "RAISE_VARARGS", [RERAISE] = "RERAISE", + [RESERVED] = "RESERVED", + [RESUME] = "RESUME", + [RESUME_CHECK] = "RESUME_CHECK", [RETURN_CONST] = "RETURN_CONST", + [RETURN_GENERATOR] = "RETURN_GENERATOR", + [RETURN_VALUE] = "RETURN_VALUE", [SEND] = "SEND", + [SEND_GEN] = "SEND_GEN", + [SETUP_ANNOTATIONS] = "SETUP_ANNOTATIONS", + [SETUP_CLEANUP] = "SETUP_CLEANUP", + [SETUP_FINALLY] = "SETUP_FINALLY", + [SETUP_WITH] = "SETUP_WITH", [SET_ADD] = "SET_ADD", [SET_FUNCTION_ATTRIBUTE] = "SET_FUNCTION_ATTRIBUTE", [SET_UPDATE] = "SET_UPDATE", [STORE_ATTR] = "STORE_ATTR", + [STORE_ATTR_INSTANCE_VALUE] = "STORE_ATTR_INSTANCE_VALUE", + [STORE_ATTR_SLOT] = "STORE_ATTR_SLOT", + [STORE_ATTR_WITH_HINT] = "STORE_ATTR_WITH_HINT", [STORE_DEREF] = "STORE_DEREF", [STORE_FAST] = "STORE_FAST", [STORE_FAST_LOAD_FAST] = "STORE_FAST_LOAD_FAST", + [STORE_FAST_MAYBE_NULL] = "STORE_FAST_MAYBE_NULL", [STORE_FAST_STORE_FAST] = "STORE_FAST_STORE_FAST", [STORE_GLOBAL] = "STORE_GLOBAL", [STORE_NAME] = "STORE_NAME", - [SWAP] = "SWAP", - [UNPACK_EX] = "UNPACK_EX", - [UNPACK_SEQUENCE] = "UNPACK_SEQUENCE", - [YIELD_VALUE] = "YIELD_VALUE", - [BINARY_OP_ADD_FLOAT] = "BINARY_OP_ADD_FLOAT", - [BINARY_OP_ADD_INT] = "BINARY_OP_ADD_INT", - [BINARY_OP_ADD_UNICODE] = "BINARY_OP_ADD_UNICODE", - [BINARY_OP_MULTIPLY_FLOAT] = "BINARY_OP_MULTIPLY_FLOAT", - [BINARY_OP_MULTIPLY_INT] = "BINARY_OP_MULTIPLY_INT", - [BINARY_OP_SUBTRACT_FLOAT] = "BINARY_OP_SUBTRACT_FLOAT", - [BINARY_OP_SUBTRACT_INT] = "BINARY_OP_SUBTRACT_INT", - [BINARY_SUBSCR_DICT] = "BINARY_SUBSCR_DICT", - [BINARY_SUBSCR_GETITEM] = "BINARY_SUBSCR_GETITEM", - [BINARY_SUBSCR_LIST_INT] = "BINARY_SUBSCR_LIST_INT", - [BINARY_SUBSCR_STR_INT] = "BINARY_SUBSCR_STR_INT", - [BINARY_SUBSCR_TUPLE_INT] = "BINARY_SUBSCR_TUPLE_INT", - [CALL_ALLOC_AND_ENTER_INIT] = "CALL_ALLOC_AND_ENTER_INIT", - [CALL_BOUND_METHOD_EXACT_ARGS] = "CALL_BOUND_METHOD_EXACT_ARGS", - [CALL_BUILTIN_CLASS] = "CALL_BUILTIN_CLASS", - [CALL_BUILTIN_FAST] = "CALL_BUILTIN_FAST", - [CALL_BUILTIN_FAST_WITH_KEYWORDS] = "CALL_BUILTIN_FAST_WITH_KEYWORDS", - [CALL_BUILTIN_O] = "CALL_BUILTIN_O", - [CALL_ISINSTANCE] = "CALL_ISINSTANCE", - [CALL_LEN] = "CALL_LEN", - [CALL_LIST_APPEND] = "CALL_LIST_APPEND", - [CALL_METHOD_DESCRIPTOR_FAST] = "CALL_METHOD_DESCRIPTOR_FAST", - [CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = "CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS", - [CALL_METHOD_DESCRIPTOR_NOARGS] = "CALL_METHOD_DESCRIPTOR_NOARGS", - [CALL_METHOD_DESCRIPTOR_O] = "CALL_METHOD_DESCRIPTOR_O", - [CALL_PY_EXACT_ARGS] = "CALL_PY_EXACT_ARGS", - [CALL_PY_WITH_DEFAULTS] = "CALL_PY_WITH_DEFAULTS", - [CALL_STR_1] = "CALL_STR_1", - [CALL_TUPLE_1] = "CALL_TUPLE_1", - [CALL_TYPE_1] = "CALL_TYPE_1", - [COMPARE_OP_FLOAT] = "COMPARE_OP_FLOAT", - [COMPARE_OP_INT] = "COMPARE_OP_INT", - [COMPARE_OP_STR] = "COMPARE_OP_STR", - [FOR_ITER_GEN] = "FOR_ITER_GEN", - [FOR_ITER_LIST] = "FOR_ITER_LIST", - [FOR_ITER_RANGE] = "FOR_ITER_RANGE", - [FOR_ITER_TUPLE] = "FOR_ITER_TUPLE", - [LOAD_ATTR_CLASS] = "LOAD_ATTR_CLASS", - [LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN] = 
"LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN", - [LOAD_ATTR_INSTANCE_VALUE] = "LOAD_ATTR_INSTANCE_VALUE", - [LOAD_ATTR_METHOD_LAZY_DICT] = "LOAD_ATTR_METHOD_LAZY_DICT", - [LOAD_ATTR_METHOD_NO_DICT] = "LOAD_ATTR_METHOD_NO_DICT", - [LOAD_ATTR_METHOD_WITH_VALUES] = "LOAD_ATTR_METHOD_WITH_VALUES", - [LOAD_ATTR_MODULE] = "LOAD_ATTR_MODULE", - [LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = "LOAD_ATTR_NONDESCRIPTOR_NO_DICT", - [LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = "LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES", - [LOAD_ATTR_PROPERTY] = "LOAD_ATTR_PROPERTY", - [LOAD_ATTR_SLOT] = "LOAD_ATTR_SLOT", - [LOAD_ATTR_WITH_HINT] = "LOAD_ATTR_WITH_HINT", - [LOAD_GLOBAL_BUILTIN] = "LOAD_GLOBAL_BUILTIN", - [LOAD_GLOBAL_MODULE] = "LOAD_GLOBAL_MODULE", - [LOAD_SUPER_ATTR_ATTR] = "LOAD_SUPER_ATTR_ATTR", - [LOAD_SUPER_ATTR_METHOD] = "LOAD_SUPER_ATTR_METHOD", - [RESUME_CHECK] = "RESUME_CHECK", - [SEND_GEN] = "SEND_GEN", - [STORE_ATTR_INSTANCE_VALUE] = "STORE_ATTR_INSTANCE_VALUE", - [STORE_ATTR_SLOT] = "STORE_ATTR_SLOT", - [STORE_ATTR_WITH_HINT] = "STORE_ATTR_WITH_HINT", + [STORE_SLICE] = "STORE_SLICE", + [STORE_SUBSCR] = "STORE_SUBSCR", [STORE_SUBSCR_DICT] = "STORE_SUBSCR_DICT", [STORE_SUBSCR_LIST_INT] = "STORE_SUBSCR_LIST_INT", + [SWAP] = "SWAP", + [TO_BOOL] = "TO_BOOL", [TO_BOOL_ALWAYS_TRUE] = "TO_BOOL_ALWAYS_TRUE", [TO_BOOL_BOOL] = "TO_BOOL_BOOL", [TO_BOOL_INT] = "TO_BOOL_INT", [TO_BOOL_LIST] = "TO_BOOL_LIST", [TO_BOOL_NONE] = "TO_BOOL_NONE", [TO_BOOL_STR] = "TO_BOOL_STR", + [UNARY_INVERT] = "UNARY_INVERT", + [UNARY_NEGATIVE] = "UNARY_NEGATIVE", + [UNARY_NOT] = "UNARY_NOT", + [UNPACK_EX] = "UNPACK_EX", + [UNPACK_SEQUENCE] = "UNPACK_SEQUENCE", [UNPACK_SEQUENCE_LIST] = "UNPACK_SEQUENCE_LIST", [UNPACK_SEQUENCE_TUPLE] = "UNPACK_SEQUENCE_TUPLE", [UNPACK_SEQUENCE_TWO_TUPLE] = "UNPACK_SEQUENCE_TWO_TUPLE", - [INSTRUMENTED_RESUME] = "INSTRUMENTED_RESUME", - [INSTRUMENTED_END_FOR] = "INSTRUMENTED_END_FOR", - [INSTRUMENTED_END_SEND] = "INSTRUMENTED_END_SEND", - [INSTRUMENTED_RETURN_VALUE] = "INSTRUMENTED_RETURN_VALUE", - [INSTRUMENTED_RETURN_CONST] = "INSTRUMENTED_RETURN_CONST", - [INSTRUMENTED_YIELD_VALUE] = "INSTRUMENTED_YIELD_VALUE", - [INSTRUMENTED_LOAD_SUPER_ATTR] = "INSTRUMENTED_LOAD_SUPER_ATTR", - [INSTRUMENTED_FOR_ITER] = "INSTRUMENTED_FOR_ITER", - [INSTRUMENTED_CALL] = "INSTRUMENTED_CALL", - [INSTRUMENTED_CALL_KW] = "INSTRUMENTED_CALL_KW", - [INSTRUMENTED_CALL_FUNCTION_EX] = "INSTRUMENTED_CALL_FUNCTION_EX", - [INSTRUMENTED_INSTRUCTION] = "INSTRUMENTED_INSTRUCTION", - [INSTRUMENTED_JUMP_FORWARD] = "INSTRUMENTED_JUMP_FORWARD", - [INSTRUMENTED_JUMP_BACKWARD] = "INSTRUMENTED_JUMP_BACKWARD", - [INSTRUMENTED_POP_JUMP_IF_TRUE] = "INSTRUMENTED_POP_JUMP_IF_TRUE", - [INSTRUMENTED_POP_JUMP_IF_FALSE] = "INSTRUMENTED_POP_JUMP_IF_FALSE", - [INSTRUMENTED_POP_JUMP_IF_NONE] = "INSTRUMENTED_POP_JUMP_IF_NONE", - [INSTRUMENTED_POP_JUMP_IF_NOT_NONE] = "INSTRUMENTED_POP_JUMP_IF_NOT_NONE", - [INSTRUMENTED_LINE] = "INSTRUMENTED_LINE", - [JUMP] = "JUMP", - [JUMP_NO_INTERRUPT] = "JUMP_NO_INTERRUPT", - [LOAD_CLOSURE] = "LOAD_CLOSURE", - [LOAD_METHOD] = "LOAD_METHOD", - [LOAD_SUPER_METHOD] = "LOAD_SUPER_METHOD", - [LOAD_ZERO_SUPER_ATTR] = "LOAD_ZERO_SUPER_ATTR", - [LOAD_ZERO_SUPER_METHOD] = "LOAD_ZERO_SUPER_METHOD", - [POP_BLOCK] = "POP_BLOCK", - [SETUP_CLEANUP] = "SETUP_CLEANUP", - [SETUP_FINALLY] = "SETUP_FINALLY", - [SETUP_WITH] = "SETUP_WITH", - [STORE_FAST_MAYBE_NULL] = "STORE_FAST_MAYBE_NULL", + [WITH_EXCEPT_START] = "WITH_EXCEPT_START", + [YIELD_VALUE] = "YIELD_VALUE", }; #endif // NEED_OPCODE_METADATA diff --git a/Include/object.h b/Include/object.h index 
bd576b0bd43211..48f1ddf7510887 100644 --- a/Include/object.h +++ b/Include/object.h @@ -239,6 +239,8 @@ PyAPI_FUNC(int) Py_Is(PyObject *x, PyObject *y); #define Py_Is(x, y) ((x) == (y)) #if defined(Py_GIL_DISABLED) && !defined(Py_LIMITED_API) +PyAPI_FUNC(uintptr_t) _Py_GetThreadLocal_Addr(void); + static inline uintptr_t _Py_ThreadId(void) { @@ -283,8 +285,17 @@ _Py_ThreadId(void) // Both GCC and Clang have supported __builtin_thread_pointer // for s390 from long time ago. tid = (uintptr_t)__builtin_thread_pointer(); +#elif defined(__riscv) + #if defined(__clang__) && _Py__has_builtin(__builtin_thread_pointer) + tid = (uintptr_t)__builtin_thread_pointer(); + #else + // tp is Thread Pointer provided by the RISC-V ABI. + __asm__ ("mv %0, tp" : "=r" (tid)); + #endif #else - # error "define _Py_ThreadId for this platform" + // Fallback to a portable implementation if we do not have a faster + // platform-specific implementation. + tid = _Py_GetThreadLocal_Addr(); #endif return tid; } diff --git a/Lib/_osx_support.py b/Lib/_osx_support.py index aa66c8b9f4189f..0cb064fcd791be 100644 --- a/Lib/_osx_support.py +++ b/Lib/_osx_support.py @@ -507,6 +507,11 @@ def get_platform_osx(_config_vars, osname, release, machine): # MACOSX_DEPLOYMENT_TARGET. macver = _config_vars.get('MACOSX_DEPLOYMENT_TARGET', '') + if macver and '.' not in macver: + # Ensure that the version includes at least a major + # and minor version, even if MACOSX_DEPLOYMENT_TARGET + # is set to a single-label version like "14". + macver += '.0' macrelease = _get_system_version() or macver macver = macver or macrelease diff --git a/Lib/dis.py b/Lib/dis.py index 183091cb0d6098..1a2f1032d500af 100644 --- a/Lib/dis.py +++ b/Lib/dis.py @@ -113,7 +113,14 @@ def dis(x=None, *, file=None, depth=None, show_caches=False, adaptive=False, elif hasattr(x, 'co_code'): # Code object _disassemble_recursive(x, file=file, depth=depth, show_caches=show_caches, adaptive=adaptive, show_offsets=show_offsets) elif isinstance(x, (bytes, bytearray)): # Raw bytecode - _disassemble_bytes(x, file=file, show_caches=show_caches, show_offsets=show_offsets) + labels_map = _make_labels_map(x) + label_width = 4 + len(str(len(labels_map))) + formatter = Formatter(file=file, + offset_width=len(str(max(len(x) - 2, 9999))) if show_offsets else 0, + label_width=label_width, + show_caches=show_caches) + arg_resolver = ArgResolver(labels_map=labels_map) + _disassemble_bytes(x, arg_resolver=arg_resolver, formatter=formatter) elif isinstance(x, str): # Source code _disassemble_str(x, file=file, depth=depth, show_caches=show_caches, adaptive=adaptive, show_offsets=show_offsets) else: @@ -394,23 +401,41 @@ def __str__(self): class Formatter: def __init__(self, file=None, lineno_width=0, offset_width=0, label_width=0, - line_offset=0): + line_offset=0, show_caches=False): """Create a Formatter *file* where to write the output *lineno_width* sets the width of the line number field (0 omits it) *offset_width* sets the width of the instruction offset field *label_width* sets the width of the label field + *show_caches* is a boolean indicating whether to display cache lines - *line_offset* the line number (within the code unit) """ self.file = file self.lineno_width = lineno_width self.offset_width = offset_width self.label_width = label_width - + self.show_caches = show_caches def print_instruction(self, instr, mark_as_current=False): + self.print_instruction_line(instr, mark_as_current) + if self.show_caches and instr.cache_info: + offset = instr.offset + for name, size, data in 
instr.cache_info: + for i in range(size): + offset += 2 + # Only show the fancy argrepr for a CACHE instruction when it's + # the first entry for a particular cache value: + if i == 0: + argrepr = f"{name}: {int.from_bytes(data, sys.byteorder)}" + else: + argrepr = "" + self.print_instruction_line( + Instruction("CACHE", CACHE, 0, None, argrepr, offset, offset, + False, None, None, instr.positions), + False) + + def print_instruction_line(self, instr, mark_as_current): """Format instruction details for inclusion in disassembly output.""" lineno_width = self.lineno_width offset_width = self.offset_width @@ -474,11 +499,14 @@ def print_exception_table(self, exception_entries): class ArgResolver: - def __init__(self, co_consts, names, varname_from_oparg, labels_map): + def __init__(self, co_consts=None, names=None, varname_from_oparg=None, labels_map=None): self.co_consts = co_consts self.names = names self.varname_from_oparg = varname_from_oparg - self.labels_map = labels_map + self.labels_map = labels_map or {} + + def get_label_for_offset(self, offset): + return self.labels_map.get(offset, None) def get_argval_argrepr(self, op, arg, offset): get_name = None if self.names is None else self.names.__getitem__ @@ -547,8 +575,7 @@ def get_argval_argrepr(self, op, arg, offset): argrepr = _intrinsic_2_descs[arg] return argval, argrepr - -def get_instructions(x, *, first_line=None, show_caches=False, adaptive=False): +def get_instructions(x, *, first_line=None, show_caches=None, adaptive=False): """Iterator for the opcodes in methods, functions or code Generates a series of Instruction named tuples giving the details of @@ -567,9 +594,10 @@ def get_instructions(x, *, first_line=None, show_caches=False, adaptive=False): line_offset = 0 original_code = co.co_code - labels_map = _make_labels_map(original_code) - arg_resolver = ArgResolver(co.co_consts, co.co_names, co._varname_from_oparg, - labels_map) + arg_resolver = ArgResolver(co_consts=co.co_consts, + names=co.co_names, + varname_from_oparg=co._varname_from_oparg, + labels_map=_make_labels_map(original_code)) return _get_instructions_bytes(_get_code_array(co, adaptive), linestarts=linestarts, line_offset=line_offset, @@ -648,7 +676,7 @@ def _is_backward_jump(op): 'ENTER_EXECUTOR') def _get_instructions_bytes(code, linestarts=None, line_offset=0, co_positions=None, - original_code=None, labels_map=None, arg_resolver=None): + original_code=None, arg_resolver=None): """Iterate over the instructions in a bytecode string. 
Generates a sequence of Instruction namedtuples giving the details of each @@ -661,8 +689,6 @@ def _get_instructions_bytes(code, linestarts=None, line_offset=0, co_positions=N original_code = original_code or code co_positions = co_positions or iter(()) - labels_map = labels_map or _make_labels_map(original_code) - starts_line = False local_line_number = None line_number = None @@ -684,10 +710,6 @@ def _get_instructions_bytes(code, linestarts=None, line_offset=0, co_positions=N else: argval, argrepr = arg, repr(arg) - instr = Instruction(_all_opname[op], op, arg, argval, argrepr, - offset, start_offset, starts_line, line_number, - labels_map.get(offset, None), positions) - caches = _get_cache_size(_all_opname[deop]) # Advance the co_positions iterator: for _ in range(caches): @@ -701,10 +723,10 @@ def _get_instructions_bytes(code, linestarts=None, line_offset=0, co_positions=N else: cache_info = None + label = arg_resolver.get_label_for_offset(offset) if arg_resolver else None yield Instruction(_all_opname[op], op, arg, argval, argrepr, offset, start_offset, starts_line, line_number, - labels_map.get(offset, None), positions, cache_info) - + label, positions, cache_info) def disassemble(co, lasti=-1, *, file=None, show_caches=False, adaptive=False, @@ -712,12 +734,20 @@ def disassemble(co, lasti=-1, *, file=None, show_caches=False, adaptive=False, """Disassemble a code object.""" linestarts = dict(findlinestarts(co)) exception_entries = _parse_exception_table(co) - _disassemble_bytes(_get_code_array(co, adaptive), - lasti, co._varname_from_oparg, - co.co_names, co.co_consts, linestarts, file=file, - exception_entries=exception_entries, - co_positions=co.co_positions(), show_caches=show_caches, - original_code=co.co_code, show_offsets=show_offsets) + labels_map = _make_labels_map(co.co_code, exception_entries=exception_entries) + label_width = 4 + len(str(len(labels_map))) + formatter = Formatter(file=file, + lineno_width=_get_lineno_width(linestarts), + offset_width=len(str(max(len(co.co_code) - 2, 9999))) if show_offsets else 0, + label_width=label_width, + show_caches=show_caches) + arg_resolver = ArgResolver(co_consts=co.co_consts, + names=co.co_names, + varname_from_oparg=co._varname_from_oparg, + labels_map=labels_map) + _disassemble_bytes(_get_code_array(co, adaptive), lasti, linestarts, + exception_entries=exception_entries, co_positions=co.co_positions(), + original_code=co.co_code, arg_resolver=arg_resolver, formatter=formatter) def _disassemble_recursive(co, *, file=None, depth=None, show_caches=False, adaptive=False, show_offsets=False): disassemble(co, file=file, show_caches=show_caches, adaptive=adaptive, show_offsets=show_offsets) @@ -764,60 +794,29 @@ def _get_lineno_width(linestarts): return lineno_width -def _disassemble_bytes(code, lasti=-1, varname_from_oparg=None, - names=None, co_consts=None, linestarts=None, - *, file=None, line_offset=0, exception_entries=(), - co_positions=None, show_caches=False, original_code=None, - show_offsets=False): - - offset_width = len(str(max(len(code) - 2, 9999))) if show_offsets else 0 - - labels_map = _make_labels_map(original_code or code, exception_entries) - label_width = 4 + len(str(len(labels_map))) +def _disassemble_bytes(code, lasti=-1, linestarts=None, + *, line_offset=0, exception_entries=(), + co_positions=None, original_code=None, + arg_resolver=None, formatter=None): - formatter = Formatter(file=file, - lineno_width=_get_lineno_width(linestarts), - offset_width=offset_width, - label_width=label_width, - 
line_offset=line_offset) + assert formatter is not None + assert arg_resolver is not None - arg_resolver = ArgResolver(co_consts, names, varname_from_oparg, labels_map) instrs = _get_instructions_bytes(code, linestarts=linestarts, line_offset=line_offset, co_positions=co_positions, original_code=original_code, - labels_map=labels_map, arg_resolver=arg_resolver) - print_instructions(instrs, exception_entries, formatter, - show_caches=show_caches, lasti=lasti) + print_instructions(instrs, exception_entries, formatter, lasti=lasti) -def print_instructions(instrs, exception_entries, formatter, show_caches=False, lasti=-1): +def print_instructions(instrs, exception_entries, formatter, lasti=-1): for instr in instrs: - if show_caches: - is_current_instr = instr.offset == lasti - else: - # Each CACHE takes 2 bytes - is_current_instr = instr.offset <= lasti \ - <= instr.offset + 2 * _get_cache_size(_all_opname[_deoptop(instr.opcode)]) + # Each CACHE takes 2 bytes + is_current_instr = instr.offset <= lasti \ + <= instr.offset + 2 * _get_cache_size(_all_opname[_deoptop(instr.opcode)]) formatter.print_instruction(instr, is_current_instr) - deop = _deoptop(instr.opcode) - if show_caches and instr.cache_info: - offset = instr.offset - for name, size, data in instr.cache_info: - for i in range(size): - offset += 2 - # Only show the fancy argrepr for a CACHE instruction when it's - # the first entry for a particular cache value: - if i == 0: - argrepr = f"{name}: {int.from_bytes(data, sys.byteorder)}" - else: - argrepr = "" - formatter.print_instruction( - Instruction("CACHE", CACHE, 0, None, argrepr, offset, offset, - False, None, None, instr.positions), - is_current_instr) formatter.print_exception_table(exception_entries) @@ -960,14 +959,15 @@ def __iter__(self): co = self.codeobj original_code = co.co_code labels_map = _make_labels_map(original_code, self.exception_entries) - arg_resolver = ArgResolver(co.co_consts, co.co_names, co._varname_from_oparg, - labels_map) + arg_resolver = ArgResolver(co_consts=co.co_consts, + names=co.co_names, + varname_from_oparg=co._varname_from_oparg, + labels_map=labels_map) return _get_instructions_bytes(_get_code_array(co, self.adaptive), linestarts=self._linestarts, line_offset=self._line_offset, co_positions=co.co_positions(), original_code=original_code, - labels_map=labels_map, arg_resolver=arg_resolver) def __repr__(self): @@ -995,18 +995,32 @@ def dis(self): else: offset = -1 with io.StringIO() as output: - _disassemble_bytes(_get_code_array(co, self.adaptive), - varname_from_oparg=co._varname_from_oparg, - names=co.co_names, co_consts=co.co_consts, + code = _get_code_array(co, self.adaptive) + offset_width = len(str(max(len(code) - 2, 9999))) if self.show_offsets else 0 + + + labels_map = _make_labels_map(co.co_code, self.exception_entries) + label_width = 4 + len(str(len(labels_map))) + formatter = Formatter(file=output, + lineno_width=_get_lineno_width(self._linestarts), + offset_width=offset_width, + label_width=label_width, + line_offset=self._line_offset, + show_caches=self.show_caches) + + arg_resolver = ArgResolver(co_consts=co.co_consts, + names=co.co_names, + varname_from_oparg=co._varname_from_oparg, + labels_map=labels_map) + _disassemble_bytes(code, linestarts=self._linestarts, line_offset=self._line_offset, - file=output, lasti=offset, exception_entries=self.exception_entries, co_positions=co.co_positions(), - show_caches=self.show_caches, original_code=co.co_code, - show_offsets=self.show_offsets) + arg_resolver=arg_resolver, + 
formatter=formatter)
             return output.getvalue()
diff --git a/Lib/doctest.py b/Lib/doctest.py
index d109b6c9e37343..114aac62a34e95 100644
--- a/Lib/doctest.py
+++ b/Lib/doctest.py
@@ -1136,6 +1136,8 @@ def _find_lineno(self, obj, source_lines):
         # Find the line number for functions & methods.
         if inspect.ismethod(obj):
             obj = obj.__func__
+        if isinstance(obj, property):
+            obj = obj.fget
         if inspect.isfunction(obj) and getattr(obj, '__doc__', None):
             # We don't use `docstring` var here, because `obj` can be changed.
             obj = obj.__code__
diff --git a/Lib/email/utils.py b/Lib/email/utils.py
index 9175f2fdb6e69e..103cef61a83538 100644
--- a/Lib/email/utils.py
+++ b/Lib/email/utils.py
@@ -43,6 +43,7 @@
 specialsre = re.compile(r'[][\\()<>@,:;".]')
 escapesre = re.compile(r'[\\"]')
 
+
 def _has_surrogates(s):
     """Return True if s may contain surrogate-escaped binary data."""
     # This check is based on the fact that unless there are surrogates, utf8
@@ -103,12 +104,127 @@ def formataddr(pair, charset='utf-8'):
     return address
 
 
+def _iter_escaped_chars(addr):
+    pos = 0
+    escape = False
+    for pos, ch in enumerate(addr):
+        if escape:
+            yield (pos, '\\' + ch)
+            escape = False
+        elif ch == '\\':
+            escape = True
+        else:
+            yield (pos, ch)
+    if escape:
+        yield (pos, '\\')
+
+
+def _strip_quoted_realnames(addr):
+    """Strip real names between quotes."""
+    if '"' not in addr:
+        # Fast path
+        return addr
+
+    start = 0
+    open_pos = None
+    result = []
+    for pos, ch in _iter_escaped_chars(addr):
+        if ch == '"':
+            if open_pos is None:
+                open_pos = pos
+            else:
+                if start != open_pos:
+                    result.append(addr[start:open_pos])
+                start = pos + 1
+                open_pos = None
+
+    if start < len(addr):
+        result.append(addr[start:])
+
+    return ''.join(result)
 
-def getaddresses(fieldvalues):
-    """Return a list of (REALNAME, EMAIL) for each fieldvalue."""
-    all = COMMASPACE.join(str(v) for v in fieldvalues)
-    a = _AddressList(all)
-    return a.addresslist
+
+supports_strict_parsing = True
+
+def getaddresses(fieldvalues, *, strict=True):
+    """Return a list of (REALNAME, EMAIL) or ('','') for each fieldvalue.
+
+    When parsing fails for a fieldvalue, a 2-tuple of ('', '') is returned in
+    its place.
+
+    If strict is true, use a strict parser which rejects malformed inputs.
+    """
+
+    # If strict is true, if the resulting list of parsed addresses is greater
+    # than the number of fieldvalues in the input list, a parsing error has
+    # occurred and consequently a list containing a single empty 2-tuple [('',
+    # '')] is returned in its place. This is done to avoid invalid output.
+    #
+    # Malformed input: getaddresses(['alice@example.com <bob@example.com>'])
+    # Invalid output: [('', 'alice@example.com'), ('', 'bob@example.com')]
+    # Safe output: [('', '')]
+
+    if not strict:
+        all = COMMASPACE.join(str(v) for v in fieldvalues)
+        a = _AddressList(all)
+        return a.addresslist
+
+    fieldvalues = [str(v) for v in fieldvalues]
+    fieldvalues = _pre_parse_validation(fieldvalues)
+    addr = COMMASPACE.join(fieldvalues)
+    a = _AddressList(addr)
+    result = _post_parse_validation(a.addresslist)
+
+    # Treat output as invalid if the number of addresses is not equal to the
+    # expected number of addresses.
+    n = 0
+    for v in fieldvalues:
+        # When a comma is used in the Real Name part it is not a deliminator.
+        # So strip those out before counting the commas.
+        v = _strip_quoted_realnames(v)
+        # Expected number of addresses: 1 + number of commas
+        n += 1 + v.count(',')
+    if len(result) != n:
+        return [('', '')]
+
+    return result
+
+
+def _check_parenthesis(addr):
+    # Ignore parenthesis in quoted real names.
+    addr = _strip_quoted_realnames(addr)
+
+    opens = 0
+    for pos, ch in _iter_escaped_chars(addr):
+        if ch == '(':
+            opens += 1
+        elif ch == ')':
+            opens -= 1
+            if opens < 0:
+                return False
+    return (opens == 0)
+
+
+def _pre_parse_validation(email_header_fields):
+    accepted_values = []
+    for v in email_header_fields:
+        if not _check_parenthesis(v):
+            v = "('', '')"
+        accepted_values.append(v)
+
+    return accepted_values
+
+
+def _post_parse_validation(parsed_email_header_tuples):
+    accepted_values = []
+    # The parser would have parsed a correctly formatted domain-literal
+    # The existence of an [ after parsing indicates a parsing failure
+    for v in parsed_email_header_tuples:
+        if '[' in v[1]:
+            v = ('', '')
+        accepted_values.append(v)
+
+    return accepted_values
 
 
 def _format_timetuple_and_zone(timetuple, zone):
@@ -207,16 +323,33 @@ def parsedate_to_datetime(data):
             tzinfo=datetime.timezone(datetime.timedelta(seconds=tz)))
 
 
-def parseaddr(addr):
+def parseaddr(addr, *, strict=True):
     """
     Parse addr into its constituent realname and email address parts.
 
     Return a tuple of realname and email address, unless the parse fails, in
     which case return a 2-tuple of ('', '').
+
+    If strict is True, use a strict parser which rejects malformed inputs.
     """
-    addrs = _AddressList(addr).addresslist
-    if not addrs:
-        return '', ''
+    if not strict:
+        addrs = _AddressList(addr).addresslist
+        if not addrs:
+            return ('', '')
+        return addrs[0]
+
+    if isinstance(addr, list):
+        addr = addr[0]
+
+    if not isinstance(addr, str):
+        return ('', '')
+
+    addr = _pre_parse_validation([addr])[0]
+    addrs = _post_parse_validation(_AddressList(addr).addresslist)
+
+    if not addrs or len(addrs) > 1:
+        return ('', '')
+
     return addrs[0]
diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py
index 21e4ad99a39628..a09bf3201e1fb7 100644
--- a/Lib/ensurepip/__init__.py
+++ b/Lib/ensurepip/__init__.py
@@ -10,7 +10,7 @@
 __all__ = ["version", "bootstrap"]
 _PACKAGE_NAMES = ('pip',)
-_PIP_VERSION = "23.3.1"
+_PIP_VERSION = "23.3.2"
 _PROJECTS = [
     ("pip", _PIP_VERSION, "py3"),
 ]
diff --git a/Lib/ensurepip/_bundled/pip-23.3.1-py3-none-any.whl b/Lib/ensurepip/_bundled/pip-23.3.2-py3-none-any.whl
similarity index 94%
rename from Lib/ensurepip/_bundled/pip-23.3.1-py3-none-any.whl
rename to Lib/ensurepip/_bundled/pip-23.3.2-py3-none-any.whl
index a4faf716b663e6..ae78b8a6ce0737 100644
Binary files a/Lib/ensurepip/_bundled/pip-23.3.1-py3-none-any.whl and b/Lib/ensurepip/_bundled/pip-23.3.2-py3-none-any.whl differ
diff --git a/Lib/fractions.py b/Lib/fractions.py
index c95db0730e5b6d..6532d5d54e3c35 100644
--- a/Lib/fractions.py
+++ b/Lib/fractions.py
@@ -139,6 +139,23 @@ def _round_to_figures(n, d, figures):
     return sign, significand, exponent
 
 
+# Pattern for matching non-float-style format specifications.
+_GENERAL_FORMAT_SPECIFICATION_MATCHER = re.compile(r"""
+    (?:
+        (?P<fill>.)?
+        (?P<align>[<>=^])
+    )?
+    (?P<sign>[-+ ]?)
+    # Alt flag forces a slash and denominator in the output, even for
+    # integer-valued Fraction objects.
+    (?P<alt>\#)?
+    # We don't implement the zeropad flag since there's no single obvious way
+    # to interpret it.
+    (?P<minimumwidth>0|[1-9][0-9]*)?
+    (?P<thousands_sep>[,_])?
+""", re.DOTALL | re.VERBOSE).fullmatch + + # Pattern for matching float-style format specifications; # supports 'e', 'E', 'f', 'F', 'g', 'G' and '%' presentation types. _FLOAT_FORMAT_SPECIFICATION_MATCHER = re.compile(r""" @@ -414,27 +431,42 @@ def __str__(self): else: return '%s/%s' % (self._numerator, self._denominator) - def __format__(self, format_spec, /): - """Format this fraction according to the given format specification.""" - - # Backwards compatiblility with existing formatting. - if not format_spec: - return str(self) + def _format_general(self, match): + """Helper method for __format__. + Handles fill, alignment, signs, and thousands separators in the + case of no presentation type. + """ # Validate and parse the format specifier. - match = _FLOAT_FORMAT_SPECIFICATION_MATCHER(format_spec) - if match is None: - raise ValueError( - f"Invalid format specifier {format_spec!r} " - f"for object of type {type(self).__name__!r}" - ) - elif match["align"] is not None and match["zeropad"] is not None: - # Avoid the temptation to guess. - raise ValueError( - f"Invalid format specifier {format_spec!r} " - f"for object of type {type(self).__name__!r}; " - "can't use explicit alignment when zero-padding" - ) + fill = match["fill"] or " " + align = match["align"] or ">" + pos_sign = "" if match["sign"] == "-" else match["sign"] + alternate_form = bool(match["alt"]) + minimumwidth = int(match["minimumwidth"] or "0") + thousands_sep = match["thousands_sep"] or '' + + # Determine the body and sign representation. + n, d = self._numerator, self._denominator + if d > 1 or alternate_form: + body = f"{abs(n):{thousands_sep}}/{d:{thousands_sep}}" + else: + body = f"{abs(n):{thousands_sep}}" + sign = '-' if n < 0 else pos_sign + + # Pad with fill character if necessary and return. + padding = fill * (minimumwidth - len(sign) - len(body)) + if align == ">": + return padding + sign + body + elif align == "<": + return sign + body + padding + elif align == "^": + half = len(padding) // 2 + return padding[:half] + sign + body + padding[half:] + else: # align == "=" + return sign + padding + body + + def _format_float_style(self, match): + """Helper method for __format__; handles float presentation types.""" fill = match["fill"] or " " align = match["align"] or ">" pos_sign = "" if match["sign"] == "-" else match["sign"] @@ -530,6 +562,23 @@ def __format__(self, format_spec, /): else: # align == "=" return sign + padding + body + def __format__(self, format_spec, /): + """Format this fraction according to the given format specification.""" + + if match := _GENERAL_FORMAT_SPECIFICATION_MATCHER(format_spec): + return self._format_general(match) + + if match := _FLOAT_FORMAT_SPECIFICATION_MATCHER(format_spec): + # Refuse the temptation to guess if both alignment _and_ + # zero padding are specified. + if match["align"] is None or match["zeropad"] is None: + return self._format_float_style(match) + + raise ValueError( + f"Invalid format specifier {format_spec!r} " + f"for object of type {type(self).__name__!r}" + ) + def _operator_fallbacks(monomorphic_operator, fallback_operator): """Generates forward and reverse operators given a purely-rational operator and a function from the operator module. 
diff --git a/Lib/http/client.py b/Lib/http/client.py index 7bb5d824bb6da4..5eebfccafbca59 100644 --- a/Lib/http/client.py +++ b/Lib/http/client.py @@ -665,6 +665,8 @@ def read1(self, n=-1): self._close_conn() elif self.length is not None: self.length -= len(result) + if not self.length: + self._close_conn() return result def peek(self, n=-1): @@ -689,6 +691,8 @@ def readline(self, limit=-1): self._close_conn() elif self.length is not None: self.length -= len(result) + if not self.length: + self._close_conn() return result def _read1_chunked(self, n): diff --git a/Lib/idlelib/News3.txt b/Lib/idlelib/News3.txt index 4fba4165fddab5..308865d968814c 100644 --- a/Lib/idlelib/News3.txt +++ b/Lib/idlelib/News3.txt @@ -4,6 +4,8 @@ Released on 2024-10-xx ========================= +gh-113269: Fix test_editor hang on macOS Catalina. + gh-112939: Fix processing unsaved files when quitting IDLE on macOS. Patch by Ronald Oussoren and Christopher Chavez. diff --git a/Lib/idlelib/idle_test/example_stub.pyi b/Lib/idlelib/idle_test/example_stub.pyi index a9811a78d10a6e..17b58010a9d8de 100644 --- a/Lib/idlelib/idle_test/example_stub.pyi +++ b/Lib/idlelib/idle_test/example_stub.pyi @@ -1,2 +1,4 @@ +" Example to test recognition of .pyi file as Python source code. + class Example: def method(self, argument1: str, argument2: list[int]) -> None: ... diff --git a/Lib/idlelib/idle_test/test_editor.py b/Lib/idlelib/idle_test/test_editor.py index 9296a6d235fbbe..0dfe2f3c58befa 100644 --- a/Lib/idlelib/idle_test/test_editor.py +++ b/Lib/idlelib/idle_test/test_editor.py @@ -95,7 +95,7 @@ def test_tabwidth_8(self): def insert(text, string): text.delete('1.0', 'end') text.insert('end', string) - text.update() # Force update for colorizer to finish. + text.update_idletasks() # Force update for colorizer to finish. class IndentAndNewlineTest(unittest.TestCase): diff --git a/Lib/idlelib/util.py b/Lib/idlelib/util.py index 5ac69a7b94cb56..a7ae74b0579004 100644 --- a/Lib/idlelib/util.py +++ b/Lib/idlelib/util.py @@ -13,9 +13,9 @@ * warning stuff (pyshell, run). """ -# .pyw is for Windows; .pyi is for stub files. -py_extensions = ('.py', '.pyw', '.pyi') # Order needed for open/save dialogs. - +# .pyw is for Windows; .pyi is for typing stub files. +# The extension order is needed for iomenu open/save dialogs. 
+py_extensions = ('.py', '.pyw', '.pyi') if __name__ == '__main__': from unittest import main diff --git a/Lib/json/decoder.py b/Lib/json/decoder.py index c5d9ae2d0d5d04..d69a45d6793069 100644 --- a/Lib/json/decoder.py +++ b/Lib/json/decoder.py @@ -200,10 +200,13 @@ def JSONObject(s_and_end, strict, scan_once, object_hook, object_pairs_hook, break elif nextchar != ',': raise JSONDecodeError("Expecting ',' delimiter", s, end - 1) + comma_idx = end - 1 end = _w(s, end).end() nextchar = s[end:end + 1] end += 1 if nextchar != '"': + if nextchar == '}': + raise JSONDecodeError("Illegal trailing comma before end of object", s, comma_idx) raise JSONDecodeError( "Expecting property name enclosed in double quotes", s, end - 1) if object_pairs_hook is not None: @@ -240,13 +243,17 @@ def JSONArray(s_and_end, scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR): break elif nextchar != ',': raise JSONDecodeError("Expecting ',' delimiter", s, end - 1) + comma_idx = end - 1 try: if s[end] in _ws: end += 1 if s[end] in _ws: end = _w(s, end + 1).end() + nextchar = s[end:end + 1] except IndexError: pass + if nextchar == ']': + raise JSONDecodeError("Illegal trailing comma before end of array", s, comma_idx) return values, end diff --git a/Lib/multiprocessing/popen_spawn_win32.py b/Lib/multiprocessing/popen_spawn_win32.py index af044305709e56..49d4c7eea22411 100644 --- a/Lib/multiprocessing/popen_spawn_win32.py +++ b/Lib/multiprocessing/popen_spawn_win32.py @@ -101,18 +101,20 @@ def duplicate_for_child(self, handle): return reduction.duplicate(handle, self.sentinel) def wait(self, timeout=None): - if self.returncode is None: - if timeout is None: - msecs = _winapi.INFINITE - else: - msecs = max(0, int(timeout * 1000 + 0.5)) - - res = _winapi.WaitForSingleObject(int(self._handle), msecs) - if res == _winapi.WAIT_OBJECT_0: - code = _winapi.GetExitCodeProcess(self._handle) - if code == TERMINATE: - code = -signal.SIGTERM - self.returncode = code + if self.returncode is not None: + return self.returncode + + if timeout is None: + msecs = _winapi.INFINITE + else: + msecs = max(0, int(timeout * 1000 + 0.5)) + + res = _winapi.WaitForSingleObject(int(self._handle), msecs) + if res == _winapi.WAIT_OBJECT_0: + code = _winapi.GetExitCodeProcess(self._handle) + if code == TERMINATE: + code = -signal.SIGTERM + self.returncode = code return self.returncode @@ -120,18 +122,22 @@ def poll(self): return self.wait(timeout=0) def terminate(self): - if self.returncode is None: - try: - _winapi.TerminateProcess(int(self._handle), TERMINATE) - except PermissionError: - # ERROR_ACCESS_DENIED (winerror 5) is received when the - # process already died. - code = _winapi.GetExitCodeProcess(int(self._handle)) - if code == _winapi.STILL_ACTIVE: - raise - self.returncode = code - else: - self.returncode = -signal.SIGTERM + if self.returncode is not None: + return + + try: + _winapi.TerminateProcess(int(self._handle), TERMINATE) + except PermissionError: + # ERROR_ACCESS_DENIED (winerror 5) is received when the + # process already died. + code = _winapi.GetExitCodeProcess(int(self._handle)) + if code == _winapi.STILL_ACTIVE: + raise + + # gh-113009: Don't set self.returncode. Even if GetExitCodeProcess() + # returns an exit code different than STILL_ACTIVE, the process can + # still be running. Only set self.returncode once WaitForSingleObject() + # returns WAIT_OBJECT_0 in wait(). 
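# A minimal sketch (not part of the patch) of the behaviour the gh-113009 comment
# above describes: terminate() only requests termination, so the exit status should
# be read back via wait()/poll(); at the multiprocessing.Process level, via join()
# and exitcode. The helper name `_work` below is illustrative only.

import multiprocessing
import time


def _work():
    time.sleep(60)


if __name__ == '__main__':
    p = multiprocessing.Process(target=_work)
    p.start()
    p.terminate()            # may return while the child is still shutting down
    p.join()                 # join() waits; only now is p.exitcode reliable
    assert p.exitcode is not None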
kill = terminate diff --git a/Lib/subprocess.py b/Lib/subprocess.py index d6edd1a9807d1b..d5bd9a9e31aa04 100644 --- a/Lib/subprocess.py +++ b/Lib/subprocess.py @@ -748,6 +748,7 @@ def _use_posix_spawn(): # guarantee the given libc/syscall API will be used. _USE_POSIX_SPAWN = _use_posix_spawn() _USE_VFORK = True +_HAVE_POSIX_SPAWN_CLOSEFROM = hasattr(os, 'POSIX_SPAWN_CLOSEFROM') class Popen: @@ -1751,14 +1752,11 @@ def _get_handles(self, stdin, stdout, stderr): errread, errwrite) - def _posix_spawn(self, args, executable, env, restore_signals, + def _posix_spawn(self, args, executable, env, restore_signals, close_fds, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite): """Execute program using os.posix_spawn().""" - if env is None: - env = os.environ - kwargs = {} if restore_signals: # See _Py_RestoreSignals() in Python/pylifecycle.c @@ -1780,6 +1778,10 @@ def _posix_spawn(self, args, executable, env, restore_signals, ): if fd != -1: file_actions.append((os.POSIX_SPAWN_DUP2, fd, fd2)) + + if close_fds: + file_actions.append((os.POSIX_SPAWN_CLOSEFROM, 3)) + if file_actions: kwargs['file_actions'] = file_actions @@ -1827,7 +1829,7 @@ def _execute_child(self, args, executable, preexec_fn, close_fds, if (_USE_POSIX_SPAWN and os.path.dirname(executable) and preexec_fn is None - and not close_fds + and (not close_fds or _HAVE_POSIX_SPAWN_CLOSEFROM) and not pass_fds and cwd is None and (p2cread == -1 or p2cread > 2) @@ -1839,7 +1841,7 @@ def _execute_child(self, args, executable, preexec_fn, close_fds, and gids is None and uid is None and umask < 0): - self._posix_spawn(args, executable, env, restore_signals, + self._posix_spawn(args, executable, env, restore_signals, close_fds, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) diff --git a/Lib/test/doctest_lineno.py b/Lib/test/doctest_lineno.py index 729a68aceaa990..677c569cf710eb 100644 --- a/Lib/test/doctest_lineno.py +++ b/Lib/test/doctest_lineno.py @@ -49,5 +49,21 @@ def method_with_doctest(self): 'method_with_doctest' """ + @classmethod + def classmethod_with_doctest(cls): + """ + This has a doctest! + >>> MethodWrapper.classmethod_with_doctest.__name__ + 'classmethod_with_doctest' + """ + + @property + def property_with_doctest(self): + """ + This has a doctest! + >>> MethodWrapper.property_with_doctest.__name__ + 'property_with_doctest' + """ + # https://github.com/python/cpython/issues/99433 str_wrapper = object().__str__ diff --git a/Lib/test/test_clinic.py b/Lib/test/test_clinic.py index bcee215126fc6a..8b95502c5ba73c 100644 --- a/Lib/test/test_clinic.py +++ b/Lib/test/test_clinic.py @@ -2264,6 +2264,17 @@ class Foo "" "" expected_error = "Cannot apply both @getter and @setter to the same function!" 
self.expect_failure(block, expected_error, lineno=3) + def test_getset_no_class(self): + for annotation in "@getter", "@setter": + with self.subTest(annotation=annotation): + block = f""" + module m + {annotation} + m.func + """ + expected_error = "@getter and @setter must be methods" + self.expect_failure(block, expected_error, lineno=2) + def test_duplicate_coexist(self): err = "Called @coexist twice" block = """ diff --git a/Lib/test/test_compile.py b/Lib/test/test_compile.py index f681d125db7d7a..906e16cc9437fb 100644 --- a/Lib/test/test_compile.py +++ b/Lib/test/test_compile.py @@ -444,6 +444,10 @@ def f(): self.assertIn("_A__mangled_mod", A.f.__code__.co_varnames) self.assertIn("__package__", A.f.__code__.co_varnames) + def test_condition_expression_with_dead_blocks_compiles(self): + # See gh-113054 + compile('if (5 if 5 else T): 0', '', 'exec') + def test_compile_invalid_namedexpr(self): # gh-109351 m = ast.Module( diff --git a/Lib/test/test_dis.py b/Lib/test/test_dis.py index 12e2c57e50b0ba..e4c7e9ba4649f7 100644 --- a/Lib/test/test_dis.py +++ b/Lib/test/test_dis.py @@ -2,6 +2,7 @@ import contextlib import dis +import functools import io import re import sys @@ -1209,8 +1210,13 @@ def test_loop_quicken(self): got = self.get_disassembly(loop_test, adaptive=True) expected = dis_loop_test_quickened_code if _testinternalcapi.get_optimizer(): - # We *may* see ENTER_EXECUTOR in the disassembly - got = got.replace("ENTER_EXECUTOR", "JUMP_BACKWARD ") + # We *may* see ENTER_EXECUTOR in the disassembly. This is a + # temporary hack to keep the test working until dis is able to + # handle the instruction correctly (GH-112383): + got = got.replace( + "ENTER_EXECUTOR 16", + "JUMP_BACKWARD 16 (to L1)", + ) self.do_disassembly_compare(got, expected) @cpython_only @@ -1982,19 +1988,27 @@ def f(opcode, oparg, offset, *init_args): self.assertEqual(f(opcode.opmap["BINARY_OP"], 3, *args), (3, '<<')) self.assertEqual(f(opcode.opmap["CALL_INTRINSIC_1"], 2, *args), (2, 'INTRINSIC_IMPORT_STAR')) + def get_instructions(self, code): + return dis._get_instructions_bytes(code) + def test_start_offset(self): # When no extended args are present, # start_offset should be equal to offset + instructions = list(dis.Bytecode(_f)) for instruction in instructions: self.assertEqual(instruction.offset, instruction.start_offset) + def last_item(iterable): + return functools.reduce(lambda a, b : b, iterable) + code = bytes([ opcode.opmap["LOAD_FAST"], 0x00, opcode.opmap["EXTENDED_ARG"], 0x01, opcode.opmap["POP_JUMP_IF_TRUE"], 0xFF, ]) - jump = list(dis._get_instructions_bytes(code))[-1] + labels_map = dis._make_labels_map(code) + jump = last_item(self.get_instructions(code)) self.assertEqual(4, jump.offset) self.assertEqual(2, jump.start_offset) @@ -2006,7 +2020,7 @@ def test_start_offset(self): opcode.opmap["POP_JUMP_IF_TRUE"], 0xFF, opcode.opmap["CACHE"], 0x00, ]) - jump = list(dis._get_instructions_bytes(code))[-1] + jump = last_item(self.get_instructions(code)) self.assertEqual(8, jump.offset) self.assertEqual(2, jump.start_offset) @@ -2021,7 +2035,7 @@ def test_start_offset(self): opcode.opmap["POP_JUMP_IF_TRUE"], 0xFF, opcode.opmap["CACHE"], 0x00, ]) - instructions = list(dis._get_instructions_bytes(code)) + instructions = list(self.get_instructions(code)) # 1st jump self.assertEqual(4, instructions[2].offset) self.assertEqual(2, instructions[2].start_offset) @@ -2042,7 +2056,7 @@ def test_cache_offset_and_end_offset(self): opcode.opmap["CACHE"], 0x00, opcode.opmap["CACHE"], 0x00 ]) - instructions = 
list(dis._get_instructions_bytes(code)) + instructions = list(self.get_instructions(code)) self.assertEqual(2, instructions[0].cache_offset) self.assertEqual(10, instructions[0].end_offset) self.assertEqual(12, instructions[1].cache_offset) diff --git a/Lib/test/test_doctest.py b/Lib/test/test_doctest.py index 36328f8086c7ad..46a51007f9644d 100644 --- a/Lib/test/test_doctest.py +++ b/Lib/test/test_doctest.py @@ -670,9 +670,11 @@ def basics(): r""" 30 test.doctest_lineno.ClassWithDoctest None test.doctest_lineno.ClassWithoutDocstring None test.doctest_lineno.MethodWrapper + 53 test.doctest_lineno.MethodWrapper.classmethod_with_doctest 39 test.doctest_lineno.MethodWrapper.method_with_docstring 45 test.doctest_lineno.MethodWrapper.method_with_doctest None test.doctest_lineno.MethodWrapper.method_without_docstring + 61 test.doctest_lineno.MethodWrapper.property_with_doctest 4 test.doctest_lineno.func_with_docstring 12 test.doctest_lineno.func_with_doctest None test.doctest_lineno.func_without_docstring diff --git a/Lib/test/test_email/test_email.py b/Lib/test/test_email/test_email.py index 512464f87162cd..39d4ace8d4a1d8 100644 --- a/Lib/test/test_email/test_email.py +++ b/Lib/test/test_email/test_email.py @@ -16,6 +16,7 @@ import email import email.policy +import email.utils from email.charset import Charset from email.generator import Generator, DecodedGenerator, BytesGenerator @@ -3337,15 +3338,137 @@ def test_getaddresses_comma_in_name(self): ], ) + def test_parsing_errors(self): + """Test for parsing errors from CVE-2023-27043 and CVE-2019-16056""" + alice = 'alice@example.org' + bob = 'bob@example.com' + empty = ('', '') + + # Test utils.getaddresses() and utils.parseaddr() on malformed email + # addresses: default behavior (strict=True) rejects malformed address, + # and strict=False which tolerates malformed address. + for invalid_separator, expected_non_strict in ( + ('(', [(f'<{bob}>', alice)]), + (')', [('', alice), empty, ('', bob)]), + ('<', [('', alice), empty, ('', bob), empty]), + ('>', [('', alice), empty, ('', bob)]), + ('[', [('', f'{alice}[<{bob}>]')]), + (']', [('', alice), empty, ('', bob)]), + ('@', [empty, empty, ('', bob)]), + (';', [('', alice), empty, ('', bob)]), + (':', [('', alice), ('', bob)]), + ('.', [('', alice + '.'), ('', bob)]), + ('"', [('', alice), ('', f'<{bob}>')]), + ): + address = f'{alice}{invalid_separator}<{bob}>' + with self.subTest(address=address): + self.assertEqual(utils.getaddresses([address]), + [empty]) + self.assertEqual(utils.getaddresses([address], strict=False), + expected_non_strict) + + self.assertEqual(utils.parseaddr([address]), + empty) + self.assertEqual(utils.parseaddr([address], strict=False), + ('', address)) + + # Comma (',') is treated differently depending on strict parameter. + # Comma without quotes. + address = f'{alice},<{bob}>' + self.assertEqual(utils.getaddresses([address]), + [('', alice), ('', bob)]) + self.assertEqual(utils.getaddresses([address], strict=False), + [('', alice), ('', bob)]) + self.assertEqual(utils.parseaddr([address]), + empty) + self.assertEqual(utils.parseaddr([address], strict=False), + ('', address)) + + # Real name between quotes containing comma. 
+        address = '"Alice, alice@example.org" <bob@example.com>'
+        expected_strict = ('Alice, alice@example.org', 'bob@example.com')
+        self.assertEqual(utils.getaddresses([address]), [expected_strict])
+        self.assertEqual(utils.getaddresses([address], strict=False), [expected_strict])
+        self.assertEqual(utils.parseaddr([address]), expected_strict)
+        self.assertEqual(utils.parseaddr([address], strict=False),
+                         ('', address))
+
+        # Valid parenthesis in comments.
+        address = 'alice@example.org (Alice)'
+        expected_strict = ('Alice', 'alice@example.org')
+        self.assertEqual(utils.getaddresses([address]), [expected_strict])
+        self.assertEqual(utils.getaddresses([address], strict=False), [expected_strict])
+        self.assertEqual(utils.parseaddr([address]), expected_strict)
+        self.assertEqual(utils.parseaddr([address], strict=False),
+                         ('', address))
+
+        # Invalid parenthesis in comments.
+        address = 'alice@example.org )Alice('
+        self.assertEqual(utils.getaddresses([address]), [empty])
+        self.assertEqual(utils.getaddresses([address], strict=False),
+                         [('', 'alice@example.org'), ('', ''), ('', 'Alice')])
+        self.assertEqual(utils.parseaddr([address]), empty)
+        self.assertEqual(utils.parseaddr([address], strict=False),
+                         ('', address))
+
+        # Two addresses with quotes separated by comma.
+        address = '"Jane Doe" <jane@example.net>, "John Doe" <john@example.net>'
+        self.assertEqual(utils.getaddresses([address]),
+                         [('Jane Doe', 'jane@example.net'),
+                          ('John Doe', 'john@example.net')])
+        self.assertEqual(utils.getaddresses([address], strict=False),
+                         [('Jane Doe', 'jane@example.net'),
+                          ('John Doe', 'john@example.net')])
+        self.assertEqual(utils.parseaddr([address]), empty)
+        self.assertEqual(utils.parseaddr([address], strict=False),
+                         ('', address))
+
+        # Test email.utils.supports_strict_parsing attribute
+        self.assertEqual(email.utils.supports_strict_parsing, True)
+
     def test_getaddresses_nasty(self):
-        eq = self.assertEqual
-        eq(utils.getaddresses(['foo: ;']), [('', '')])
-        eq(utils.getaddresses(
-           ['[]*-- =~$']),
-           [('', ''), ('', ''), ('', '*--')])
-        eq(utils.getaddresses(
-           ['foo: ;', '"Jason R. Mastaler" <jason@dom.ain>']),
-           [('', ''), ('Jason R. Mastaler', 'jason@dom.ain')])
+        for addresses, expected in (
+            (['"Sürname, Firstname" <to@example.com>'],
+             [('Sürname, Firstname', 'to@example.com')]),
+
+            (['foo: ;'],
+             [('', '')]),
+
+            (['foo: ;', '"Jason R. Mastaler" <jason@dom.ain>'],
+             [('', ''), ('Jason R. Mastaler', 'jason@dom.ain')]),
+
+            ([r'Pete(A nice \) chap) <pete(his account)@silly.test(his host)>'],
+             [('Pete (A nice ) chap his account his host)', 'pete@silly.test')]),
+
+            (['(Empty list)(start)Undisclosed recipients :(nobody(I know))'],
+             [('', '')]),
+
+            (['Mary <@machine.tld:mary@example.net>, , jdoe@test . example'],
+             [('Mary', 'mary@example.net'), ('', ''), ('', 'jdoe@test.example')]),
+
+            (['John Doe <jdoe@machine(comment). example>'],
+             [('John Doe (comment)', 'jdoe@machine.example')]),
+
+            (['"Mary Smith: Personal Account" <smith@home.example>'],
+             [('Mary Smith: Personal Account', 'smith@home.example')]),
+
+            (['Undisclosed recipients:;'],
+             [('', '')]),
+
+            ([r'<boss@nil.test>, "Giant; \"Big\" Box" <bob@example.net>'],
+             [('', 'boss@nil.test'), ('Giant; "Big" Box', 'bob@example.net')]),
+        ):
+            with self.subTest(addresses=addresses):
+                self.assertEqual(utils.getaddresses(addresses),
+                                 expected)
+                self.assertEqual(utils.getaddresses(addresses, strict=False),
+                                 expected)
+
+        addresses = ['[]*-- =~$']
+        self.assertEqual(utils.getaddresses(addresses),
+                         [('', '')])
+        self.assertEqual(utils.getaddresses(addresses, strict=False),
+                         [('', ''), ('', ''), ('', '*--')])
 
     def test_getaddresses_embedded_comment(self):
         """Test proper handling of a nested comment"""
@@ -3536,6 +3659,54 @@ def test_mime_classes_policy_argument(self):
             m = cls(*constructor, policy=email.policy.default)
             self.assertIs(m.policy, email.policy.default)
 
+    def test_iter_escaped_chars(self):
+        self.assertEqual(list(utils._iter_escaped_chars(r'a\\b\"c\\"d')),
+                         [(0, 'a'),
+                          (2, '\\\\'),
+                          (3, 'b'),
+                          (5, '\\"'),
+                          (6, 'c'),
+                          (8, '\\\\'),
+                          (9, '"'),
+                          (10, 'd')])
+        self.assertEqual(list(utils._iter_escaped_chars('a\\')),
+                         [(0, 'a'), (1, '\\')])
+
+    def test_strip_quoted_realnames(self):
+        def check(addr, expected):
+            self.assertEqual(utils._strip_quoted_realnames(addr), expected)
+
+        check('"Jane Doe" <jane@example.net>, "John Doe" <john@example.net>',
+              ' <jane@example.net>,  <john@example.net>')
+        check(r'"Jane \"Doe\"." <jane@example.net>',
+              ' <jane@example.net>')
+
+        # special cases
+        check(r'before"name"after', 'beforeafter')
+        check(r'before"name"', 'before')
+        check(r'b"name"', 'b')  # single char
+        check(r'"name"after', 'after')
+        check(r'"name"a', 'a')  # single char
+        check(r'"name"', '')
+
+        # no change
+        for addr in (
+            'Jane Doe <jane@example.net>, John Doe <john@example.net>',
+            'lone " quote',
+        ):
+            self.assertEqual(utils._strip_quoted_realnames(addr), addr)
+
+
+    def test_check_parenthesis(self):
+        addr = 'alice@example.net'
+        self.assertTrue(utils._check_parenthesis(f'{addr} (Alice)'))
+        self.assertFalse(utils._check_parenthesis(f'{addr} )Alice('))
+        self.assertFalse(utils._check_parenthesis(f'{addr} (Alice))'))
+        self.assertFalse(utils._check_parenthesis(f'{addr} ((Alice)'))
+
+        # Ignore real name between quotes
+        self.assertTrue(utils._check_parenthesis(f'")Alice((" {addr}'))
+
 
 # Test the iterator/generators
 class TestIterators(TestEmailBase):
diff --git a/Lib/test/test_fractions.py b/Lib/test/test_fractions.py
index 499e3b6e656faa..84779526ce0eb0 100644
--- a/Lib/test/test_fractions.py
+++ b/Lib/test/test_fractions.py
@@ -849,12 +849,50 @@ def denominator(self):
         self.assertEqual(type(f.denominator), myint)
 
     def test_format_no_presentation_type(self):
-        # Triples (fraction, specification, expected_result)
+        # Triples (fraction, specification, expected_result).
testcases = [ - (F(1, 3), '', '1/3'), - (F(-1, 3), '', '-1/3'), - (F(3), '', '3'), - (F(-3), '', '-3'), + # Explicit sign handling + (F(2, 3), '+', '+2/3'), + (F(-2, 3), '+', '-2/3'), + (F(3), '+', '+3'), + (F(-3), '+', '-3'), + (F(2, 3), ' ', ' 2/3'), + (F(-2, 3), ' ', '-2/3'), + (F(3), ' ', ' 3'), + (F(-3), ' ', '-3'), + (F(2, 3), '-', '2/3'), + (F(-2, 3), '-', '-2/3'), + (F(3), '-', '3'), + (F(-3), '-', '-3'), + # Padding + (F(0), '5', ' 0'), + (F(2, 3), '5', ' 2/3'), + (F(-2, 3), '5', ' -2/3'), + (F(2, 3), '0', '2/3'), + (F(2, 3), '1', '2/3'), + (F(2, 3), '2', '2/3'), + # Alignment + (F(2, 3), '<5', '2/3 '), + (F(2, 3), '>5', ' 2/3'), + (F(2, 3), '^5', ' 2/3 '), + (F(2, 3), '=5', ' 2/3'), + (F(-2, 3), '<5', '-2/3 '), + (F(-2, 3), '>5', ' -2/3'), + (F(-2, 3), '^5', '-2/3 '), + (F(-2, 3), '=5', '- 2/3'), + # Fill + (F(2, 3), 'X>5', 'XX2/3'), + (F(-2, 3), '.<5', '-2/3.'), + (F(-2, 3), '\n^6', '\n-2/3\n'), + # Thousands separators + (F(1234, 5679), ',', '1,234/5,679'), + (F(-1234, 5679), '_', '-1_234/5_679'), + (F(1234567), '_', '1_234_567'), + (F(-1234567), ',', '-1,234,567'), + # Alternate form forces a slash in the output + (F(123), '#', '123/1'), + (F(-123), '#', '-123/1'), + (F(0), '#', '0/1'), ] for fraction, spec, expected in testcases: with self.subTest(fraction=fraction, spec=spec): @@ -1218,6 +1256,10 @@ def test_invalid_formats(self): '.%', # Z instead of z for negative zero suppression 'Z.2f' + # z flag not supported for general formatting + 'z', + # zero padding not supported for general formatting + '05', ] for spec in invalid_specs: with self.subTest(spec=spec): diff --git a/Lib/test/test_generated_cases.py b/Lib/test/test_generated_cases.py index de96a8764594ba..3541a4e70b9a56 100644 --- a/Lib/test/test_generated_cases.py +++ b/Lib/test/test_generated_cases.py @@ -32,6 +32,7 @@ def skip_if_different_mount_drives(): import analysis import formatting from parsing import StackEffect + import tier1_generator def handle_stderr(): @@ -108,13 +109,12 @@ def run_cases_test(self, input: str, expected: str): temp_input.write(analysis.END_MARKER) temp_input.flush() - a = generate_cases.Generator([self.temp_input_filename]) with handle_stderr(): - a.parse() - a.analyze() - if a.errors: - raise RuntimeError(f"Found {a.errors} errors") - a.write_instructions(self.temp_output_filename, False) + tier1_generator.generate_tier1_from_files( + [self.temp_input_filename], + self.temp_output_filename, + False + ) with open(self.temp_output_filename) as temp_output: lines = temp_output.readlines() @@ -163,7 +163,7 @@ def test_inst_one_pop(self): PyObject *value; value = stack_pointer[-1]; spam(); - STACK_SHRINK(1); + stack_pointer += -1; DISPATCH(); } """ @@ -182,8 +182,8 @@ def test_inst_one_push(self): INSTRUCTION_STATS(OP); PyObject *res; spam(); - STACK_GROW(1); - stack_pointer[-1] = res; + stack_pointer[0] = res; + stack_pointer += 1; DISPATCH(); } """ @@ -227,8 +227,8 @@ def test_binary_op(self): right = stack_pointer[-1]; left = stack_pointer[-2]; spam(); - STACK_SHRINK(1); - stack_pointer[-1] = res; + stack_pointer[-2] = res; + stack_pointer += -1; DISPATCH(); } """ @@ -273,7 +273,6 @@ def test_predictions_and_eval_breaker(self): next_instr += 1; INSTRUCTION_STATS(OP1); PREDICTED(OP1); - static_assert(INLINE_CACHE_ENTRIES_OP1 == 0, "incorrect cache size"); PyObject *arg; PyObject *rest; arg = stack_pointer[-1]; @@ -285,6 +284,7 @@ def test_predictions_and_eval_breaker(self): frame->instr_ptr = next_instr; next_instr += 1; INSTRUCTION_STATS(OP3); + static_assert(INLINE_CACHE_ENTRIES_OP1 == 
0, "incorrect cache size"); PyObject *arg; PyObject *res; arg = stack_pointer[-1]; @@ -325,6 +325,7 @@ def test_error_if_plain_with_comment(self): next_instr += 1; INSTRUCTION_STATS(OP); if (cond) goto label; + // Comment is ok DISPATCH(); } """ @@ -347,8 +348,8 @@ def test_error_if_pop(self): right = stack_pointer[-1]; left = stack_pointer[-2]; if (cond) goto pop_2_label; - STACK_SHRINK(1); - stack_pointer[-1] = res; + stack_pointer[-2] = res; + stack_pointer += -1; DISPATCH(); } """ @@ -368,7 +369,7 @@ def test_cache_effect(self): value = stack_pointer[-1]; uint16_t counter = read_u16(&this_instr[1].cache); uint32_t extra = read_u32(&this_instr[2].cache); - STACK_SHRINK(1); + stack_pointer += -1; DISPATCH(); } """ @@ -411,26 +412,26 @@ def test_macro_instruction(self): INSTRUCTION_STATS(OP); PREDICTED(OP); _Py_CODEUNIT *this_instr = next_instr - 6; - static_assert(INLINE_CACHE_ENTRIES_OP == 5, "incorrect cache size"); PyObject *right; PyObject *left; PyObject *arg2; PyObject *res; - // OP1 + // _OP1 right = stack_pointer[-1]; left = stack_pointer[-2]; { uint16_t counter = read_u16(&this_instr[1].cache); op1(left, right); } + /* Skip 2 cache entries */ // OP2 arg2 = stack_pointer[-3]; { uint32_t extra = read_u32(&this_instr[4].cache); res = op2(arg2, left, right); } - STACK_SHRINK(2); - stack_pointer[-1] = res; + stack_pointer[-3] = res; + stack_pointer += -2; DISPATCH(); } @@ -451,16 +452,37 @@ def test_macro_instruction(self): frame->instr_ptr = next_instr; next_instr += 6; INSTRUCTION_STATS(OP3); + static_assert(INLINE_CACHE_ENTRIES_OP == 5, "incorrect cache size"); PyObject *right; PyObject *left; PyObject *arg2; PyObject *res; + /* Skip 5 cache entries */ right = stack_pointer[-1]; left = stack_pointer[-2]; arg2 = stack_pointer[-3]; res = op3(arg2, left, right); - STACK_SHRINK(2); - stack_pointer[-1] = res; + stack_pointer[-3] = res; + stack_pointer += -2; + DISPATCH(); + } + """ + self.run_cases_test(input, output) + + def test_unused_caches(self): + input = """ + inst(OP, (unused/1, unused/2 --)) { + body(); + } + """ + output = """ + TARGET(OP) { + frame->instr_ptr = next_instr; + next_instr += 4; + INSTRUCTION_STATS(OP); + /* Skip 1 cache entry */ + /* Skip 2 cache entries */ + body(); DISPATCH(); } """ @@ -519,11 +541,10 @@ def test_array_input(self): PyObject **values; PyObject *below; above = stack_pointer[-1]; - values = stack_pointer - 1 - oparg*2; + values = &stack_pointer[-1 - oparg*2]; below = stack_pointer[-2 - oparg*2]; spam(); - STACK_SHRINK(oparg*2); - STACK_SHRINK(2); + stack_pointer += -2 - oparg*2; DISPATCH(); } """ @@ -543,11 +564,11 @@ def test_array_output(self): PyObject *below; PyObject **values; PyObject *above; - values = stack_pointer - 1; + values = &stack_pointer[-1]; spam(values, oparg); - STACK_GROW(oparg*3); - stack_pointer[-2 - oparg*3] = below; - stack_pointer[-1] = above; + stack_pointer[-2] = below; + stack_pointer[-1 + oparg*3] = above; + stack_pointer += oparg*3; DISPATCH(); } """ @@ -566,10 +587,10 @@ def test_array_input_output(self): INSTRUCTION_STATS(OP); PyObject **values; PyObject *above; - values = stack_pointer - oparg; + values = &stack_pointer[-oparg]; spam(values, oparg); - STACK_GROW(1); - stack_pointer[-1] = above; + stack_pointer[0] = above; + stack_pointer += 1; DISPATCH(); } """ @@ -588,11 +609,10 @@ def test_array_error_if(self): INSTRUCTION_STATS(OP); PyObject **values; PyObject *extra; - values = stack_pointer - oparg; + values = &stack_pointer[-oparg]; extra = stack_pointer[-1 - oparg]; - if (oparg == 0) { STACK_SHRINK(oparg); 
goto pop_1_somewhere; } - STACK_SHRINK(oparg); - STACK_SHRINK(1); + if (oparg == 0) { stack_pointer += -1 - oparg; goto somewhere; } + stack_pointer += -1 - oparg; DISPATCH(); } """ @@ -616,14 +636,13 @@ def test_cond_effect(self): PyObject *output = NULL; PyObject *zz; cc = stack_pointer[-1]; - if ((oparg & 1) == 1) { input = stack_pointer[-1 - ((oparg & 1) == 1 ? 1 : 0)]; } - aa = stack_pointer[-2 - ((oparg & 1) == 1 ? 1 : 0)]; + if ((oparg & 1) == 1) { input = stack_pointer[-1 - ((((oparg & 1) == 1) ? 1 : 0))]; } + aa = stack_pointer[-2 - ((((oparg & 1) == 1) ? 1 : 0))]; output = spam(oparg, input); - STACK_SHRINK((((oparg & 1) == 1) ? 1 : 0)); - STACK_GROW(((oparg & 2) ? 1 : 0)); - stack_pointer[-2 - (oparg & 2 ? 1 : 0)] = xx; - if (oparg & 2) { stack_pointer[-1 - (oparg & 2 ? 1 : 0)] = output; } - stack_pointer[-1] = zz; + stack_pointer[-2 - ((((oparg & 1) == 1) ? 1 : 0))] = xx; + if (oparg & 2) stack_pointer[-1 - ((((oparg & 1) == 1) ? 1 : 0))] = output; + stack_pointer[-1 - ((((oparg & 1) == 1) ? 1 : 0)) + (((oparg & 2) ? 1 : 0))] = zz; + stack_pointer += -((((oparg & 1) == 1) ? 1 : 0)) + (((oparg & 2) ? 1 : 0)); DISPATCH(); } """ @@ -661,11 +680,10 @@ def test_macro_cond_effect(self): { # Body of B } - STACK_SHRINK(1); - STACK_GROW((oparg ? 1 : 0)); - stack_pointer[-2 - (oparg ? 1 : 0)] = deep; - if (oparg) { stack_pointer[-1 - (oparg ? 1 : 0)] = extra; } - stack_pointer[-1] = res; + stack_pointer[-3] = deep; + if (oparg) stack_pointer[-2] = extra; + stack_pointer[-2 + (((oparg) ? 1 : 0))] = res; + stack_pointer += -1 + (((oparg) ? 1 : 0)); DISPATCH(); } """ @@ -696,9 +714,9 @@ def test_macro_push_push(self): { val2 = spam(); } - STACK_GROW(2); - stack_pointer[-2] = val1; - stack_pointer[-1] = val2; + stack_pointer[0] = val1; + stack_pointer[1] = val2; + stack_pointer += 2; DISPATCH(); } """ diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py index caa4c76a913a01..089bf5be40a0e2 100644 --- a/Lib/test/test_httplib.py +++ b/Lib/test/test_httplib.py @@ -1546,11 +1546,14 @@ def test_readline(self): resp = self.resp self._verify_readline(self.resp.readline, self.lines_expected) - def _verify_readline(self, readline, expected): + def test_readline_without_limit(self): + self._verify_readline(self.resp.readline, self.lines_expected, limit=-1) + + def _verify_readline(self, readline, expected, limit=5): all = [] while True: # short readlines - line = readline(5) + line = readline(limit) if line and line != b"foo": if len(line) < 5: self.assertTrue(line.endswith(b"\n")) @@ -1558,6 +1561,7 @@ def _verify_readline(self, readline, expected): if not line: break self.assertEqual(b"".join(all), expected) + self.assertTrue(self.resp.isclosed()) def test_read1(self): resp = self.resp @@ -1577,6 +1581,7 @@ def test_read1_unbounded(self): break all.append(data) self.assertEqual(b"".join(all), self.lines_expected) + self.assertTrue(resp.isclosed()) def test_read1_bounded(self): resp = self.resp @@ -1588,15 +1593,22 @@ def test_read1_bounded(self): self.assertLessEqual(len(data), 10) all.append(data) self.assertEqual(b"".join(all), self.lines_expected) + self.assertTrue(resp.isclosed()) def test_read1_0(self): self.assertEqual(self.resp.read1(0), b"") + self.assertFalse(self.resp.isclosed()) def test_peek_0(self): p = self.resp.peek(0) self.assertLessEqual(0, len(p)) +class ExtendedReadTestContentLengthKnown(ExtendedReadTest): + _header, _body = ExtendedReadTest.lines.split('\r\n\r\n', 1) + lines = _header + f'\r\nContent-Length: {len(_body)}\r\n\r\n' + _body + + class 
ExtendedReadTestChunked(ExtendedReadTest): """ Test peek(), read1(), readline() in chunked mode diff --git a/Lib/test/test_itertools.py b/Lib/test/test_itertools.py index 705e880d98685e..9af0730ea98004 100644 --- a/Lib/test/test_itertools.py +++ b/Lib/test/test_itertools.py @@ -187,7 +187,11 @@ def test_batched(self): [('A', 'B'), ('C', 'D'), ('E', 'F'), ('G',)]) self.assertEqual(list(batched('ABCDEFG', 1)), [('A',), ('B',), ('C',), ('D',), ('E',), ('F',), ('G',)]) + self.assertEqual(list(batched('ABCDEF', 2, strict=True)), + [('A', 'B'), ('C', 'D'), ('E', 'F')]) + with self.assertRaises(ValueError): # Incomplete batch when strict + list(batched('ABCDEFG', 3, strict=True)) with self.assertRaises(TypeError): # Too few arguments list(batched('ABCDEFG')) with self.assertRaises(TypeError): diff --git a/Lib/test/test_json/test_fail.py b/Lib/test/test_json/test_fail.py index efc982e8b0eb04..d6bce605e21463 100644 --- a/Lib/test/test_json/test_fail.py +++ b/Lib/test/test_json/test_fail.py @@ -143,11 +143,11 @@ def test_unexpected_data(self): ('{"spam":[}', 'Expecting value', 9), ('[42:', "Expecting ',' delimiter", 3), ('[42 "spam"', "Expecting ',' delimiter", 4), - ('[42,]', 'Expecting value', 4), + ('[42,]', "Illegal trailing comma before end of array", 3), ('{"spam":[42}', "Expecting ',' delimiter", 11), ('["]', 'Unterminated string starting at', 1), ('["spam":', "Expecting ',' delimiter", 7), - ('["spam",]', 'Expecting value', 8), + ('["spam",]', "Illegal trailing comma before end of array", 7), ('{:', 'Expecting property name enclosed in double quotes', 1), ('{,', 'Expecting property name enclosed in double quotes', 1), ('{42', 'Expecting property name enclosed in double quotes', 1), @@ -159,7 +159,9 @@ def test_unexpected_data(self): ('[{"spam":]', 'Expecting value', 9), ('{"spam":42 "ham"', "Expecting ',' delimiter", 11), ('[{"spam":42]', "Expecting ',' delimiter", 11), - ('{"spam":42,}', 'Expecting property name enclosed in double quotes', 11), + ('{"spam":42,}', "Illegal trailing comma before end of object", 10), + ('{"spam":42 , }', "Illegal trailing comma before end of object", 11), + ('[123 , ]', "Illegal trailing comma before end of array", 6), ] for data, msg, idx in test_cases: with self.assertRaises(self.JSONDecodeError) as cm: diff --git a/Lib/test/test_pathlib.py b/Lib/test/test_pathlib.py deleted file mode 100644 index a1acba406ea37c..00000000000000 --- a/Lib/test/test_pathlib.py +++ /dev/null @@ -1,3867 +0,0 @@ -import collections.abc -import io -import os -import sys -import errno -import pathlib -import pickle -import posixpath -import socket -import stat -import tempfile -import unittest -from unittest import mock -from urllib.request import pathname2url - -from test.support import import_helper -from test.support import set_recursion_limit -from test.support import is_emscripten, is_wasi -from test.support import os_helper -from test.support.os_helper import TESTFN, FakePath - -try: - import grp, pwd -except ImportError: - grp = pwd = None - - -class UnsupportedOperationTest(unittest.TestCase): - def test_is_notimplemented(self): - self.assertTrue(issubclass(pathlib.UnsupportedOperation, NotImplementedError)) - self.assertTrue(isinstance(pathlib.UnsupportedOperation(), NotImplementedError)) - - -# Make sure any symbolic links in the base test path are resolved. 
-BASE = os.path.realpath(TESTFN) -join = lambda *x: os.path.join(BASE, *x) -rel_join = lambda *x: os.path.join(TESTFN, *x) - -only_nt = unittest.skipIf(os.name != 'nt', - 'test requires a Windows-compatible system') -only_posix = unittest.skipIf(os.name == 'nt', - 'test requires a POSIX-compatible system') - -root_in_posix = False -if hasattr(os, 'geteuid'): - root_in_posix = (os.geteuid() == 0) - -# -# Tests for the pure classes. -# - - -class PurePathBaseTest(unittest.TestCase): - cls = pathlib._abc.PurePathBase - - def test_magic_methods(self): - P = self.cls - self.assertFalse(hasattr(P, '__fspath__')) - self.assertFalse(hasattr(P, '__bytes__')) - self.assertIs(P.__reduce__, object.__reduce__) - self.assertIs(P.__hash__, object.__hash__) - self.assertIs(P.__eq__, object.__eq__) - self.assertIs(P.__lt__, object.__lt__) - self.assertIs(P.__le__, object.__le__) - self.assertIs(P.__gt__, object.__gt__) - self.assertIs(P.__ge__, object.__ge__) - - -class DummyPurePath(pathlib._abc.PurePathBase): - def __eq__(self, other): - if not isinstance(other, DummyPurePath): - return NotImplemented - return str(self) == str(other) - - def __hash__(self): - return hash(str(self)) - - -class DummyPurePathTest(unittest.TestCase): - cls = DummyPurePath - - # Keys are canonical paths, values are list of tuples of arguments - # supposed to produce equal paths. - equivalences = { - 'a/b': [ - ('a', 'b'), ('a/', 'b'), ('a', 'b/'), ('a/', 'b/'), - ('a/b/',), ('a//b',), ('a//b//',), - # Empty components get removed. - ('', 'a', 'b'), ('a', '', 'b'), ('a', 'b', ''), - ], - '/b/c/d': [ - ('a', '/b/c', 'd'), ('/a', '/b/c', 'd'), - # Empty components get removed. - ('/', 'b', '', 'c/d'), ('/', '', 'b/c/d'), ('', '/b/c/d'), - ], - } - - def setUp(self): - p = self.cls('a') - self.pathmod = p.pathmod - self.sep = self.pathmod.sep - self.altsep = self.pathmod.altsep - - def test_constructor_common(self): - P = self.cls - p = P('a') - self.assertIsInstance(p, P) - P('a', 'b', 'c') - P('/a', 'b', 'c') - P('a/b/c') - P('/a/b/c') - - def test_concrete_class(self): - if self.cls is pathlib.PurePath: - expected = pathlib.PureWindowsPath if os.name == 'nt' else pathlib.PurePosixPath - else: - expected = self.cls - p = self.cls('a') - self.assertIs(type(p), expected) - - def test_different_pathmods_unequal(self): - p = self.cls('a') - if p.pathmod is posixpath: - q = pathlib.PureWindowsPath('a') - else: - q = pathlib.PurePosixPath('a') - self.assertNotEqual(p, q) - - def test_different_pathmods_unordered(self): - p = self.cls('a') - if p.pathmod is posixpath: - q = pathlib.PureWindowsPath('a') - else: - q = pathlib.PurePosixPath('a') - with self.assertRaises(TypeError): - p < q - with self.assertRaises(TypeError): - p <= q - with self.assertRaises(TypeError): - p > q - with self.assertRaises(TypeError): - p >= q - - def _check_str_subclass(self, *args): - # Issue #21127: it should be possible to construct a PurePath object - # from a str subclass instance, and it then gets converted to - # a pure str object. 
- class StrSubclass(str): - pass - P = self.cls - p = P(*(StrSubclass(x) for x in args)) - self.assertEqual(p, P(*args)) - for part in p.parts: - self.assertIs(type(part), str) - - def test_str_subclass_common(self): - self._check_str_subclass('') - self._check_str_subclass('.') - self._check_str_subclass('a') - self._check_str_subclass('a/b.txt') - self._check_str_subclass('/a/b.txt') - - def test_with_segments_common(self): - class P(self.cls): - def __init__(self, *pathsegments, session_id): - super().__init__(*pathsegments) - self.session_id = session_id - - def with_segments(self, *pathsegments): - return type(self)(*pathsegments, session_id=self.session_id) - p = P('foo', 'bar', session_id=42) - self.assertEqual(42, (p / 'foo').session_id) - self.assertEqual(42, ('foo' / p).session_id) - self.assertEqual(42, p.joinpath('foo').session_id) - self.assertEqual(42, p.with_name('foo').session_id) - self.assertEqual(42, p.with_stem('foo').session_id) - self.assertEqual(42, p.with_suffix('.foo').session_id) - self.assertEqual(42, p.with_segments('foo').session_id) - self.assertEqual(42, p.relative_to('foo').session_id) - self.assertEqual(42, p.parent.session_id) - for parent in p.parents: - self.assertEqual(42, parent.session_id) - - def _check_parse_path(self, raw_path, *expected): - sep = self.pathmod.sep - actual = self.cls._parse_path(raw_path.replace('/', sep)) - self.assertEqual(actual, expected) - if altsep := self.pathmod.altsep: - actual = self.cls._parse_path(raw_path.replace('/', altsep)) - self.assertEqual(actual, expected) - - def test_parse_path_common(self): - check = self._check_parse_path - sep = self.pathmod.sep - check('', '', '', []) - check('a', '', '', ['a']) - check('a/', '', '', ['a']) - check('a/b', '', '', ['a', 'b']) - check('a/b/', '', '', ['a', 'b']) - check('a/b/c/d', '', '', ['a', 'b', 'c', 'd']) - check('a/b//c/d', '', '', ['a', 'b', 'c', 'd']) - check('a/b/c/d', '', '', ['a', 'b', 'c', 'd']) - check('.', '', '', []) - check('././b', '', '', ['b']) - check('a/./b', '', '', ['a', 'b']) - check('a/./.', '', '', ['a']) - check('/a/b', '', sep, ['a', 'b']) - - def test_join_common(self): - P = self.cls - p = P('a/b') - pp = p.joinpath('c') - self.assertEqual(pp, P('a/b/c')) - self.assertIs(type(pp), type(p)) - pp = p.joinpath('c', 'd') - self.assertEqual(pp, P('a/b/c/d')) - pp = p.joinpath('/c') - self.assertEqual(pp, P('/c')) - - def test_div_common(self): - # Basically the same as joinpath(). - P = self.cls - p = P('a/b') - pp = p / 'c' - self.assertEqual(pp, P('a/b/c')) - self.assertIs(type(pp), type(p)) - pp = p / 'c/d' - self.assertEqual(pp, P('a/b/c/d')) - pp = p / 'c' / 'd' - self.assertEqual(pp, P('a/b/c/d')) - pp = 'c' / p / 'd' - self.assertEqual(pp, P('c/a/b/d')) - pp = p/ '/c' - self.assertEqual(pp, P('/c')) - - def _check_str(self, expected, args): - p = self.cls(*args) - self.assertEqual(str(p), expected.replace('/', self.sep)) - - def test_str_common(self): - # Canonicalized paths roundtrip. - for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'): - self._check_str(pathstr, (pathstr,)) - # Special case for the empty path. - self._check_str('.', ('',)) - # Other tests for str() are in test_equivalences(). - - def test_as_posix_common(self): - P = self.cls - for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'): - self.assertEqual(P(pathstr).as_posix(), pathstr) - # Other tests for as_posix() are in test_equivalences(). 
- - def test_repr_common(self): - for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'): - with self.subTest(pathstr=pathstr): - p = self.cls(pathstr) - clsname = p.__class__.__name__ - r = repr(p) - # The repr() is in the form ClassName("forward-slashes path"). - self.assertTrue(r.startswith(clsname + '('), r) - self.assertTrue(r.endswith(')'), r) - inner = r[len(clsname) + 1 : -1] - self.assertEqual(eval(inner), p.as_posix()) - - def test_eq_common(self): - P = self.cls - self.assertEqual(P('a/b'), P('a/b')) - self.assertEqual(P('a/b'), P('a', 'b')) - self.assertNotEqual(P('a/b'), P('a')) - self.assertNotEqual(P('a/b'), P('/a/b')) - self.assertNotEqual(P('a/b'), P()) - self.assertNotEqual(P('/a/b'), P('/')) - self.assertNotEqual(P(), P('/')) - self.assertNotEqual(P(), "") - self.assertNotEqual(P(), {}) - self.assertNotEqual(P(), int) - - def test_match_common(self): - P = self.cls - self.assertRaises(ValueError, P('a').match, '') - self.assertRaises(ValueError, P('a').match, '.') - # Simple relative pattern. - self.assertTrue(P('b.py').match('b.py')) - self.assertTrue(P('a/b.py').match('b.py')) - self.assertTrue(P('/a/b.py').match('b.py')) - self.assertFalse(P('a.py').match('b.py')) - self.assertFalse(P('b/py').match('b.py')) - self.assertFalse(P('/a.py').match('b.py')) - self.assertFalse(P('b.py/c').match('b.py')) - # Wildcard relative pattern. - self.assertTrue(P('b.py').match('*.py')) - self.assertTrue(P('a/b.py').match('*.py')) - self.assertTrue(P('/a/b.py').match('*.py')) - self.assertFalse(P('b.pyc').match('*.py')) - self.assertFalse(P('b./py').match('*.py')) - self.assertFalse(P('b.py/c').match('*.py')) - # Multi-part relative pattern. - self.assertTrue(P('ab/c.py').match('a*/*.py')) - self.assertTrue(P('/d/ab/c.py').match('a*/*.py')) - self.assertFalse(P('a.py').match('a*/*.py')) - self.assertFalse(P('/dab/c.py').match('a*/*.py')) - self.assertFalse(P('ab/c.py/d').match('a*/*.py')) - # Absolute pattern. - self.assertTrue(P('/b.py').match('/*.py')) - self.assertFalse(P('b.py').match('/*.py')) - self.assertFalse(P('a/b.py').match('/*.py')) - self.assertFalse(P('/a/b.py').match('/*.py')) - # Multi-part absolute pattern. - self.assertTrue(P('/a/b.py').match('/a/*.py')) - self.assertFalse(P('/ab.py').match('/a/*.py')) - self.assertFalse(P('/a/b/c.py').match('/a/*.py')) - # Multi-part glob-style pattern. 
-        self.assertTrue(P('a').match('**'))
-        self.assertTrue(P('c.py').match('**'))
-        self.assertTrue(P('a/b/c.py').match('**'))
-        self.assertTrue(P('/a/b/c.py').match('**'))
-        self.assertTrue(P('/a/b/c.py').match('/**'))
-        self.assertTrue(P('/a/b/c.py').match('**/'))
-        self.assertTrue(P('/a/b/c.py').match('/a/**'))
-        self.assertTrue(P('/a/b/c.py').match('**/*.py'))
-        self.assertTrue(P('/a/b/c.py').match('/**/*.py'))
-        self.assertTrue(P('/a/b/c.py').match('/a/**/*.py'))
-        self.assertTrue(P('/a/b/c.py').match('/a/b/**/*.py'))
-        self.assertTrue(P('/a/b/c.py').match('/**/**/**/**/*.py'))
-        self.assertFalse(P('c.py').match('**/a.py'))
-        self.assertFalse(P('c.py').match('c/**'))
-        self.assertFalse(P('a/b/c.py').match('**/a'))
-        self.assertFalse(P('a/b/c.py').match('**/a/b'))
-        self.assertFalse(P('a/b/c.py').match('**/a/b/c'))
-        self.assertFalse(P('a/b/c.py').match('**/a/b/c.'))
-        self.assertFalse(P('a/b/c.py').match('**/a/b/c./**'))
-        self.assertFalse(P('a/b/c.py').match('**/a/b/c./**'))
-        self.assertFalse(P('a/b/c.py').match('/a/b/c.py/**'))
-        self.assertFalse(P('a/b/c.py').match('/**/a/b/c.py'))
-        self.assertRaises(ValueError, P('a').match, '**a/b/c')
-        self.assertRaises(ValueError, P('a').match, 'a/b/c**')
-        # Case-sensitive flag
-        self.assertFalse(P('A.py').match('a.PY', case_sensitive=True))
-        self.assertTrue(P('A.py').match('a.PY', case_sensitive=False))
-        self.assertFalse(P('c:/a/B.Py').match('C:/A/*.pY', case_sensitive=True))
-        self.assertTrue(P('/a/b/c.py').match('/A/*/*.Py', case_sensitive=False))
-        # Matching against empty path
-        self.assertFalse(P().match('*'))
-        self.assertTrue(P().match('**'))
-        self.assertFalse(P().match('**/*'))
-
-    def test_parts_common(self):
-        # `parts` returns a tuple.
-        sep = self.sep
-        P = self.cls
-        p = P('a/b')
-        parts = p.parts
-        self.assertEqual(parts, ('a', 'b'))
-        # When the path is absolute, the anchor is a separate part.
- p = P('/a/b') - parts = p.parts - self.assertEqual(parts, (sep, 'a', 'b')) - - def test_equivalences(self): - for k, tuples in self.equivalences.items(): - canon = k.replace('/', self.sep) - posix = k.replace(self.sep, '/') - if canon != posix: - tuples = tuples + [ - tuple(part.replace('/', self.sep) for part in t) - for t in tuples - ] - tuples.append((posix, )) - pcanon = self.cls(canon) - for t in tuples: - p = self.cls(*t) - self.assertEqual(p, pcanon, "failed with args {}".format(t)) - self.assertEqual(hash(p), hash(pcanon)) - self.assertEqual(str(p), canon) - self.assertEqual(p.as_posix(), posix) - - def test_parent_common(self): - # Relative - P = self.cls - p = P('a/b/c') - self.assertEqual(p.parent, P('a/b')) - self.assertEqual(p.parent.parent, P('a')) - self.assertEqual(p.parent.parent.parent, P()) - self.assertEqual(p.parent.parent.parent.parent, P()) - # Anchored - p = P('/a/b/c') - self.assertEqual(p.parent, P('/a/b')) - self.assertEqual(p.parent.parent, P('/a')) - self.assertEqual(p.parent.parent.parent, P('/')) - self.assertEqual(p.parent.parent.parent.parent, P('/')) - - def test_parents_common(self): - # Relative - P = self.cls - p = P('a/b/c') - par = p.parents - self.assertEqual(len(par), 3) - self.assertEqual(par[0], P('a/b')) - self.assertEqual(par[1], P('a')) - self.assertEqual(par[2], P('.')) - self.assertEqual(par[-1], P('.')) - self.assertEqual(par[-2], P('a')) - self.assertEqual(par[-3], P('a/b')) - self.assertEqual(par[0:1], (P('a/b'),)) - self.assertEqual(par[:2], (P('a/b'), P('a'))) - self.assertEqual(par[:-1], (P('a/b'), P('a'))) - self.assertEqual(par[1:], (P('a'), P('.'))) - self.assertEqual(par[::2], (P('a/b'), P('.'))) - self.assertEqual(par[::-1], (P('.'), P('a'), P('a/b'))) - self.assertEqual(list(par), [P('a/b'), P('a'), P('.')]) - with self.assertRaises(IndexError): - par[-4] - with self.assertRaises(IndexError): - par[3] - with self.assertRaises(TypeError): - par[0] = p - # Anchored - p = P('/a/b/c') - par = p.parents - self.assertEqual(len(par), 3) - self.assertEqual(par[0], P('/a/b')) - self.assertEqual(par[1], P('/a')) - self.assertEqual(par[2], P('/')) - self.assertEqual(par[-1], P('/')) - self.assertEqual(par[-2], P('/a')) - self.assertEqual(par[-3], P('/a/b')) - self.assertEqual(par[0:1], (P('/a/b'),)) - self.assertEqual(par[:2], (P('/a/b'), P('/a'))) - self.assertEqual(par[:-1], (P('/a/b'), P('/a'))) - self.assertEqual(par[1:], (P('/a'), P('/'))) - self.assertEqual(par[::2], (P('/a/b'), P('/'))) - self.assertEqual(par[::-1], (P('/'), P('/a'), P('/a/b'))) - self.assertEqual(list(par), [P('/a/b'), P('/a'), P('/')]) - with self.assertRaises(IndexError): - par[-4] - with self.assertRaises(IndexError): - par[3] - - def test_drive_common(self): - P = self.cls - self.assertEqual(P('a/b').drive, '') - self.assertEqual(P('/a/b').drive, '') - self.assertEqual(P('').drive, '') - - def test_root_common(self): - P = self.cls - sep = self.sep - self.assertEqual(P('').root, '') - self.assertEqual(P('a/b').root, '') - self.assertEqual(P('/').root, sep) - self.assertEqual(P('/a/b').root, sep) - - def test_anchor_common(self): - P = self.cls - sep = self.sep - self.assertEqual(P('').anchor, '') - self.assertEqual(P('a/b').anchor, '') - self.assertEqual(P('/').anchor, sep) - self.assertEqual(P('/a/b').anchor, sep) - - def test_name_common(self): - P = self.cls - self.assertEqual(P('').name, '') - self.assertEqual(P('.').name, '') - self.assertEqual(P('/').name, '') - self.assertEqual(P('a/b').name, 'b') - self.assertEqual(P('/a/b').name, 'b') - 
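# Aside: the parent / parents behaviour asserted above, as a stand-alone
# sketch with PurePosixPath. .parents is an immutable sequence that supports
# negative indexing and slicing (Python 3.10+), and the chain tops out at '.'
# for relative paths and at the root for anchored ones.
from pathlib import PurePosixPath as P
p = P('a/b/c')
assert p.parent.parent.parent == p.parent.parent.parent.parent == P('.')
assert tuple(p.parents) == (P('a/b'), P('a'), P('.'))
assert p.parents[-1] == P('.') and p.parents[:2] == (P('a/b'), P('a'))
assert tuple(P('/a/b/c').parents) == (P('/a/b'), P('/a'), P('/'))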
self.assertEqual(P('/a/b/.').name, 'b') - self.assertEqual(P('a/b.py').name, 'b.py') - self.assertEqual(P('/a/b.py').name, 'b.py') - - def test_suffix_common(self): - P = self.cls - self.assertEqual(P('').suffix, '') - self.assertEqual(P('.').suffix, '') - self.assertEqual(P('..').suffix, '') - self.assertEqual(P('/').suffix, '') - self.assertEqual(P('a/b').suffix, '') - self.assertEqual(P('/a/b').suffix, '') - self.assertEqual(P('/a/b/.').suffix, '') - self.assertEqual(P('a/b.py').suffix, '.py') - self.assertEqual(P('/a/b.py').suffix, '.py') - self.assertEqual(P('a/.hgrc').suffix, '') - self.assertEqual(P('/a/.hgrc').suffix, '') - self.assertEqual(P('a/.hg.rc').suffix, '.rc') - self.assertEqual(P('/a/.hg.rc').suffix, '.rc') - self.assertEqual(P('a/b.tar.gz').suffix, '.gz') - self.assertEqual(P('/a/b.tar.gz').suffix, '.gz') - self.assertEqual(P('a/Some name. Ending with a dot.').suffix, '') - self.assertEqual(P('/a/Some name. Ending with a dot.').suffix, '') - - def test_suffixes_common(self): - P = self.cls - self.assertEqual(P('').suffixes, []) - self.assertEqual(P('.').suffixes, []) - self.assertEqual(P('/').suffixes, []) - self.assertEqual(P('a/b').suffixes, []) - self.assertEqual(P('/a/b').suffixes, []) - self.assertEqual(P('/a/b/.').suffixes, []) - self.assertEqual(P('a/b.py').suffixes, ['.py']) - self.assertEqual(P('/a/b.py').suffixes, ['.py']) - self.assertEqual(P('a/.hgrc').suffixes, []) - self.assertEqual(P('/a/.hgrc').suffixes, []) - self.assertEqual(P('a/.hg.rc').suffixes, ['.rc']) - self.assertEqual(P('/a/.hg.rc').suffixes, ['.rc']) - self.assertEqual(P('a/b.tar.gz').suffixes, ['.tar', '.gz']) - self.assertEqual(P('/a/b.tar.gz').suffixes, ['.tar', '.gz']) - self.assertEqual(P('a/Some name. Ending with a dot.').suffixes, []) - self.assertEqual(P('/a/Some name. Ending with a dot.').suffixes, []) - - def test_stem_common(self): - P = self.cls - self.assertEqual(P('').stem, '') - self.assertEqual(P('.').stem, '') - self.assertEqual(P('..').stem, '..') - self.assertEqual(P('/').stem, '') - self.assertEqual(P('a/b').stem, 'b') - self.assertEqual(P('a/b.py').stem, 'b') - self.assertEqual(P('a/.hgrc').stem, '.hgrc') - self.assertEqual(P('a/.hg.rc').stem, '.hg') - self.assertEqual(P('a/b.tar.gz').stem, 'b.tar') - self.assertEqual(P('a/Some name. Ending with a dot.').stem, - 'Some name. 
Ending with a dot.') - - def test_with_name_common(self): - P = self.cls - self.assertEqual(P('a/b').with_name('d.xml'), P('a/d.xml')) - self.assertEqual(P('/a/b').with_name('d.xml'), P('/a/d.xml')) - self.assertEqual(P('a/b.py').with_name('d.xml'), P('a/d.xml')) - self.assertEqual(P('/a/b.py').with_name('d.xml'), P('/a/d.xml')) - self.assertEqual(P('a/Dot ending.').with_name('d.xml'), P('a/d.xml')) - self.assertEqual(P('/a/Dot ending.').with_name('d.xml'), P('/a/d.xml')) - self.assertRaises(ValueError, P('').with_name, 'd.xml') - self.assertRaises(ValueError, P('.').with_name, 'd.xml') - self.assertRaises(ValueError, P('/').with_name, 'd.xml') - self.assertRaises(ValueError, P('a/b').with_name, '') - self.assertRaises(ValueError, P('a/b').with_name, '.') - self.assertRaises(ValueError, P('a/b').with_name, '/c') - self.assertRaises(ValueError, P('a/b').with_name, 'c/') - self.assertRaises(ValueError, P('a/b').with_name, 'c/d') - - def test_with_stem_common(self): - P = self.cls - self.assertEqual(P('a/b').with_stem('d'), P('a/d')) - self.assertEqual(P('/a/b').with_stem('d'), P('/a/d')) - self.assertEqual(P('a/b.py').with_stem('d'), P('a/d.py')) - self.assertEqual(P('/a/b.py').with_stem('d'), P('/a/d.py')) - self.assertEqual(P('/a/b.tar.gz').with_stem('d'), P('/a/d.gz')) - self.assertEqual(P('a/Dot ending.').with_stem('d'), P('a/d')) - self.assertEqual(P('/a/Dot ending.').with_stem('d'), P('/a/d')) - self.assertRaises(ValueError, P('').with_stem, 'd') - self.assertRaises(ValueError, P('.').with_stem, 'd') - self.assertRaises(ValueError, P('/').with_stem, 'd') - self.assertRaises(ValueError, P('a/b').with_stem, '') - self.assertRaises(ValueError, P('a/b').with_stem, '.') - self.assertRaises(ValueError, P('a/b').with_stem, '/c') - self.assertRaises(ValueError, P('a/b').with_stem, 'c/') - self.assertRaises(ValueError, P('a/b').with_stem, 'c/d') - - def test_with_suffix_common(self): - P = self.cls - self.assertEqual(P('a/b').with_suffix('.gz'), P('a/b.gz')) - self.assertEqual(P('/a/b').with_suffix('.gz'), P('/a/b.gz')) - self.assertEqual(P('a/b.py').with_suffix('.gz'), P('a/b.gz')) - self.assertEqual(P('/a/b.py').with_suffix('.gz'), P('/a/b.gz')) - # Stripping suffix. - self.assertEqual(P('a/b.py').with_suffix(''), P('a/b')) - self.assertEqual(P('/a/b').with_suffix(''), P('/a/b')) - # Path doesn't have a "filename" component. - self.assertRaises(ValueError, P('').with_suffix, '.gz') - self.assertRaises(ValueError, P('.').with_suffix, '.gz') - self.assertRaises(ValueError, P('/').with_suffix, '.gz') - # Invalid suffix. 
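# Aside: the name-surgery methods checked above, distilled with PurePosixPath.
# with_stem() keeps only the final suffix, and all three methods reject paths
# that have no final component; the invalid replacement suffixes are
# enumerated next.
from pathlib import PurePosixPath as P
assert P('/a/b.py').with_name('d.xml') == P('/a/d.xml')
assert P('/a/b.tar.gz').with_stem('d') == P('/a/d.gz')   # only '.gz' survives
assert P('a/b.py').with_suffix('') == P('a/b')
try:
    P('/').with_name('d.xml')
except ValueError:
    pass  # the root has no name to replace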
-        self.assertRaises(ValueError, P('a/b').with_suffix, 'gz')
-        self.assertRaises(ValueError, P('a/b').with_suffix, '/')
-        self.assertRaises(ValueError, P('a/b').with_suffix, '.')
-        self.assertRaises(ValueError, P('a/b').with_suffix, '/.gz')
-        self.assertRaises(ValueError, P('a/b').with_suffix, 'c/d')
-        self.assertRaises(ValueError, P('a/b').with_suffix, '.c/.d')
-        self.assertRaises(ValueError, P('a/b').with_suffix, './.d')
-        self.assertRaises(ValueError, P('a/b').with_suffix, '.d/.')
-
-    def test_relative_to_common(self):
-        P = self.cls
-        p = P('a/b')
-        self.assertRaises(TypeError, p.relative_to)
-        self.assertRaises(TypeError, p.relative_to, b'a')
-        self.assertEqual(p.relative_to(P()), P('a/b'))
-        self.assertEqual(p.relative_to(''), P('a/b'))
-        self.assertEqual(p.relative_to(P('a')), P('b'))
-        self.assertEqual(p.relative_to('a'), P('b'))
-        self.assertEqual(p.relative_to('a/'), P('b'))
-        self.assertEqual(p.relative_to(P('a/b')), P())
-        self.assertEqual(p.relative_to('a/b'), P())
-        self.assertEqual(p.relative_to(P(), walk_up=True), P('a/b'))
-        self.assertEqual(p.relative_to('', walk_up=True), P('a/b'))
-        self.assertEqual(p.relative_to(P('a'), walk_up=True), P('b'))
-        self.assertEqual(p.relative_to('a', walk_up=True), P('b'))
-        self.assertEqual(p.relative_to('a/', walk_up=True), P('b'))
-        self.assertEqual(p.relative_to(P('a/b'), walk_up=True), P())
-        self.assertEqual(p.relative_to('a/b', walk_up=True), P())
-        self.assertEqual(p.relative_to(P('a/c'), walk_up=True), P('../b'))
-        self.assertEqual(p.relative_to('a/c', walk_up=True), P('../b'))
-        self.assertEqual(p.relative_to(P('a/b/c'), walk_up=True), P('..'))
-        self.assertEqual(p.relative_to('a/b/c', walk_up=True), P('..'))
-        self.assertEqual(p.relative_to(P('c'), walk_up=True), P('../a/b'))
-        self.assertEqual(p.relative_to('c', walk_up=True), P('../a/b'))
-        # With several args.
-        with self.assertWarns(DeprecationWarning):
-            p.relative_to('a', 'b')
-            p.relative_to('a', 'b', walk_up=True)
-        # Unrelated paths.
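# Aside: relative_to() as exercised above, in isolation. Without walk_up the
# other path must be a prefix; with walk_up=True (Python 3.12+) '..' entries
# are inserted instead. The failing, unrelated-path cases follow below.
from pathlib import PurePosixPath as P
p = P('a/b')
assert p.relative_to('a') == P('b')
assert p.relative_to('a/c', walk_up=True) == P('../b')
assert p.relative_to('c', walk_up=True) == P('../a/b')
try:
    p.relative_to('c')                    # not a prefix and walk_up is off
except ValueError:
    pass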
- self.assertRaises(ValueError, p.relative_to, P('c')) - self.assertRaises(ValueError, p.relative_to, P('a/b/c')) - self.assertRaises(ValueError, p.relative_to, P('a/c')) - self.assertRaises(ValueError, p.relative_to, P('/a')) - self.assertRaises(ValueError, p.relative_to, P("../a")) - self.assertRaises(ValueError, p.relative_to, P("a/..")) - self.assertRaises(ValueError, p.relative_to, P("/a/..")) - self.assertRaises(ValueError, p.relative_to, P('/'), walk_up=True) - self.assertRaises(ValueError, p.relative_to, P('/a'), walk_up=True) - self.assertRaises(ValueError, p.relative_to, P("../a"), walk_up=True) - self.assertRaises(ValueError, p.relative_to, P("a/.."), walk_up=True) - self.assertRaises(ValueError, p.relative_to, P("/a/.."), walk_up=True) - p = P('/a/b') - self.assertEqual(p.relative_to(P('/')), P('a/b')) - self.assertEqual(p.relative_to('/'), P('a/b')) - self.assertEqual(p.relative_to(P('/a')), P('b')) - self.assertEqual(p.relative_to('/a'), P('b')) - self.assertEqual(p.relative_to('/a/'), P('b')) - self.assertEqual(p.relative_to(P('/a/b')), P()) - self.assertEqual(p.relative_to('/a/b'), P()) - self.assertEqual(p.relative_to(P('/'), walk_up=True), P('a/b')) - self.assertEqual(p.relative_to('/', walk_up=True), P('a/b')) - self.assertEqual(p.relative_to(P('/a'), walk_up=True), P('b')) - self.assertEqual(p.relative_to('/a', walk_up=True), P('b')) - self.assertEqual(p.relative_to('/a/', walk_up=True), P('b')) - self.assertEqual(p.relative_to(P('/a/b'), walk_up=True), P()) - self.assertEqual(p.relative_to('/a/b', walk_up=True), P()) - self.assertEqual(p.relative_to(P('/a/c'), walk_up=True), P('../b')) - self.assertEqual(p.relative_to('/a/c', walk_up=True), P('../b')) - self.assertEqual(p.relative_to(P('/a/b/c'), walk_up=True), P('..')) - self.assertEqual(p.relative_to('/a/b/c', walk_up=True), P('..')) - self.assertEqual(p.relative_to(P('/c'), walk_up=True), P('../a/b')) - self.assertEqual(p.relative_to('/c', walk_up=True), P('../a/b')) - # Unrelated paths. - self.assertRaises(ValueError, p.relative_to, P('/c')) - self.assertRaises(ValueError, p.relative_to, P('/a/b/c')) - self.assertRaises(ValueError, p.relative_to, P('/a/c')) - self.assertRaises(ValueError, p.relative_to, P()) - self.assertRaises(ValueError, p.relative_to, '') - self.assertRaises(ValueError, p.relative_to, P('a')) - self.assertRaises(ValueError, p.relative_to, P("../a")) - self.assertRaises(ValueError, p.relative_to, P("a/..")) - self.assertRaises(ValueError, p.relative_to, P("/a/..")) - self.assertRaises(ValueError, p.relative_to, P(''), walk_up=True) - self.assertRaises(ValueError, p.relative_to, P('a'), walk_up=True) - self.assertRaises(ValueError, p.relative_to, P("../a"), walk_up=True) - self.assertRaises(ValueError, p.relative_to, P("a/.."), walk_up=True) - self.assertRaises(ValueError, p.relative_to, P("/a/.."), walk_up=True) - - def test_is_relative_to_common(self): - P = self.cls - p = P('a/b') - self.assertRaises(TypeError, p.is_relative_to) - self.assertRaises(TypeError, p.is_relative_to, b'a') - self.assertTrue(p.is_relative_to(P())) - self.assertTrue(p.is_relative_to('')) - self.assertTrue(p.is_relative_to(P('a'))) - self.assertTrue(p.is_relative_to('a/')) - self.assertTrue(p.is_relative_to(P('a/b'))) - self.assertTrue(p.is_relative_to('a/b')) - # With several args. - with self.assertWarns(DeprecationWarning): - p.is_relative_to('a', 'b') - # Unrelated paths. 
- self.assertFalse(p.is_relative_to(P('c'))) - self.assertFalse(p.is_relative_to(P('a/b/c'))) - self.assertFalse(p.is_relative_to(P('a/c'))) - self.assertFalse(p.is_relative_to(P('/a'))) - p = P('/a/b') - self.assertTrue(p.is_relative_to(P('/'))) - self.assertTrue(p.is_relative_to('/')) - self.assertTrue(p.is_relative_to(P('/a'))) - self.assertTrue(p.is_relative_to('/a')) - self.assertTrue(p.is_relative_to('/a/')) - self.assertTrue(p.is_relative_to(P('/a/b'))) - self.assertTrue(p.is_relative_to('/a/b')) - # Unrelated paths. - self.assertFalse(p.is_relative_to(P('/c'))) - self.assertFalse(p.is_relative_to(P('/a/b/c'))) - self.assertFalse(p.is_relative_to(P('/a/c'))) - self.assertFalse(p.is_relative_to(P())) - self.assertFalse(p.is_relative_to('')) - self.assertFalse(p.is_relative_to(P('a'))) - - -class PurePathTest(DummyPurePathTest): - cls = pathlib.PurePath - - def test_constructor_nested(self): - P = self.cls - P(FakePath("a/b/c")) - self.assertEqual(P(P('a')), P('a')) - self.assertEqual(P(P('a'), 'b'), P('a/b')) - self.assertEqual(P(P('a'), P('b')), P('a/b')) - self.assertEqual(P(P('a'), P('b'), P('c')), P(FakePath("a/b/c"))) - self.assertEqual(P(P('./a:b')), P('./a:b')) - - def test_join_nested(self): - P = self.cls - p = P('a/b').joinpath(P('c')) - self.assertEqual(p, P('a/b/c')) - - def test_div_nested(self): - P = self.cls - p = P('a/b') / P('c') - self.assertEqual(p, P('a/b/c')) - - def test_pickling_common(self): - P = self.cls - p = P('/a/b') - for proto in range(0, pickle.HIGHEST_PROTOCOL + 1): - dumped = pickle.dumps(p, proto) - pp = pickle.loads(dumped) - self.assertIs(pp.__class__, p.__class__) - self.assertEqual(pp, p) - self.assertEqual(hash(pp), hash(p)) - self.assertEqual(str(pp), str(p)) - - def test_fspath_common(self): - P = self.cls - p = P('a/b') - self._check_str(p.__fspath__(), ('a/b',)) - self._check_str(os.fspath(p), ('a/b',)) - - def test_bytes(self): - P = self.cls - message = (r"argument should be a str or an os\.PathLike object " - r"where __fspath__ returns a str, not 'bytes'") - with self.assertRaisesRegex(TypeError, message): - P(b'a') - with self.assertRaisesRegex(TypeError, message): - P(b'a', 'b') - with self.assertRaisesRegex(TypeError, message): - P('a', b'b') - with self.assertRaises(TypeError): - P('a').joinpath(b'b') - with self.assertRaises(TypeError): - P('a') / b'b' - with self.assertRaises(TypeError): - b'a' / P('b') - with self.assertRaises(TypeError): - P('a').match(b'b') - with self.assertRaises(TypeError): - P('a').relative_to(b'b') - with self.assertRaises(TypeError): - P('a').with_name(b'b') - with self.assertRaises(TypeError): - P('a').with_stem(b'b') - with self.assertRaises(TypeError): - P('a').with_suffix(b'b') - - def test_as_bytes_common(self): - sep = os.fsencode(self.sep) - P = self.cls - self.assertEqual(bytes(P('a/b')), b'a' + sep + b'b') - - def test_ordering_common(self): - # Ordering is tuple-alike. 
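# Aside: the constructor and protocol behaviour above in stand-alone form.
# PurePath accepts str and str-returning os.PathLike segments (including other
# paths) but rejects bytes, and os.fspath() returns the str form. Ordering,
# checked next, compares paths much like tuples of their parts.
import os
from pathlib import PurePosixPath as P
assert P(P('a'), 'b') == P('a/b')
assert os.fspath(P('a/b')) == 'a/b'
try:
    P(b'a')
except TypeError:
    pass  # bytes paths are not accepted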
- def assertLess(a, b): - self.assertLess(a, b) - self.assertGreater(b, a) - P = self.cls - a = P('a') - b = P('a/b') - c = P('abc') - d = P('b') - assertLess(a, b) - assertLess(a, c) - assertLess(a, d) - assertLess(b, c) - assertLess(c, d) - P = self.cls - a = P('/a') - b = P('/a/b') - c = P('/abc') - d = P('/b') - assertLess(a, b) - assertLess(a, c) - assertLess(a, d) - assertLess(b, c) - assertLess(c, d) - with self.assertRaises(TypeError): - P() < {} - - def test_as_uri_common(self): - P = self.cls - with self.assertRaises(ValueError): - P('a').as_uri() - with self.assertRaises(ValueError): - P().as_uri() - - def test_repr_roundtrips(self): - for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'): - with self.subTest(pathstr=pathstr): - p = self.cls(pathstr) - r = repr(p) - # The repr() roundtrips. - q = eval(r, pathlib.__dict__) - self.assertIs(q.__class__, p.__class__) - self.assertEqual(q, p) - self.assertEqual(repr(q), r) - - -class PurePosixPathTest(PurePathTest): - cls = pathlib.PurePosixPath - - def test_parse_path(self): - check = self._check_parse_path - # Collapsing of excess leading slashes, except for the double-slash - # special case. - check('//a/b', '', '//', ['a', 'b']) - check('///a/b', '', '/', ['a', 'b']) - check('////a/b', '', '/', ['a', 'b']) - # Paths which look like NT paths aren't treated specially. - check('c:a', '', '', ['c:a',]) - check('c:\\a', '', '', ['c:\\a',]) - check('\\a', '', '', ['\\a',]) - - def test_root(self): - P = self.cls - self.assertEqual(P('/a/b').root, '/') - self.assertEqual(P('///a/b').root, '/') - # POSIX special case for two leading slashes. - self.assertEqual(P('//a/b').root, '//') - - def test_eq(self): - P = self.cls - self.assertNotEqual(P('a/b'), P('A/b')) - self.assertEqual(P('/a'), P('///a')) - self.assertNotEqual(P('/a'), P('//a')) - - def test_as_uri(self): - P = self.cls - self.assertEqual(P('/').as_uri(), 'file:///') - self.assertEqual(P('/a/b.c').as_uri(), 'file:///a/b.c') - self.assertEqual(P('/a/b%#c').as_uri(), 'file:///a/b%25%23c') - - def test_as_uri_non_ascii(self): - from urllib.parse import quote_from_bytes - P = self.cls - try: - os.fsencode('\xe9') - except UnicodeEncodeError: - self.skipTest("\\xe9 cannot be encoded to the filesystem encoding") - self.assertEqual(P('/a/b\xe9').as_uri(), - 'file:///a/b' + quote_from_bytes(os.fsencode('\xe9'))) - - def test_match(self): - P = self.cls - self.assertFalse(P('A.py').match('a.PY')) - - def test_is_absolute(self): - P = self.cls - self.assertFalse(P().is_absolute()) - self.assertFalse(P('a').is_absolute()) - self.assertFalse(P('a/b/').is_absolute()) - self.assertTrue(P('/').is_absolute()) - self.assertTrue(P('/a').is_absolute()) - self.assertTrue(P('/a/b/').is_absolute()) - self.assertTrue(P('//a').is_absolute()) - self.assertTrue(P('//a/b').is_absolute()) - - def test_is_reserved(self): - P = self.cls - self.assertIs(False, P('').is_reserved()) - self.assertIs(False, P('/').is_reserved()) - self.assertIs(False, P('/foo/bar').is_reserved()) - self.assertIs(False, P('/dev/con/PRN/NUL').is_reserved()) - - def test_join(self): - P = self.cls - p = P('//a') - pp = p.joinpath('b') - self.assertEqual(pp, P('//a/b')) - pp = P('/a').joinpath('//c') - self.assertEqual(pp, P('//c')) - pp = P('//a').joinpath('/c') - self.assertEqual(pp, P('/c')) - - def test_div(self): - # Basically the same as joinpath(). 
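# Aside: the POSIX-specific corner cases above, runnable stand-alone. Exactly
# two leading slashes are preserved as the root (per POSIX), three or more
# collapse to '/', and as_uri() percent-encodes unsafe characters. The '/'
# operator checked in this test behaves like joinpath().
from pathlib import PurePosixPath as P
assert P('//a/b').root == '//'
assert P('///a/b') == P('/a/b') and P('//a') != P('/a')
assert P('/a/b%#c').as_uri() == 'file:///a/b%25%23c'
assert P('/a').joinpath('//c') == P('//c')   # a rooted segment replaces the path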
- P = self.cls - p = P('//a') - pp = p / 'b' - self.assertEqual(pp, P('//a/b')) - pp = P('/a') / '//c' - self.assertEqual(pp, P('//c')) - pp = P('//a') / '/c' - self.assertEqual(pp, P('/c')) - - def test_parse_windows_path(self): - P = self.cls - p = P('c:', 'a', 'b') - pp = P(pathlib.PureWindowsPath('c:\\a\\b')) - self.assertEqual(p, pp) - - -class PureWindowsPathTest(PurePathTest): - cls = pathlib.PureWindowsPath - - equivalences = PurePathTest.equivalences.copy() - equivalences.update({ - './a:b': [ ('./a:b',) ], - 'c:a': [ ('c:', 'a'), ('c:', 'a/'), ('.', 'c:', 'a') ], - 'c:/a': [ - ('c:/', 'a'), ('c:', '/', 'a'), ('c:', '/a'), - ('/z', 'c:/', 'a'), ('//x/y', 'c:/', 'a'), - ], - '//a/b/': [ ('//a/b',) ], - '//a/b/c': [ - ('//a/b', 'c'), ('//a/b/', 'c'), - ], - }) - - def test_parse_path(self): - check = self._check_parse_path - # First part is anchored. - check('c:', 'c:', '', []) - check('c:/', 'c:', '\\', []) - check('/', '', '\\', []) - check('c:a', 'c:', '', ['a']) - check('c:/a', 'c:', '\\', ['a']) - check('/a', '', '\\', ['a']) - # UNC paths. - check('//', '\\\\', '', []) - check('//a', '\\\\a', '', []) - check('//a/', '\\\\a\\', '', []) - check('//a/b', '\\\\a\\b', '\\', []) - check('//a/b/', '\\\\a\\b', '\\', []) - check('//a/b/c', '\\\\a\\b', '\\', ['c']) - # Collapsing and stripping excess slashes. - check('Z://b//c/d/', 'Z:', '\\', ['b', 'c', 'd']) - # UNC paths. - check('//b/c//d', '\\\\b\\c', '\\', ['d']) - # Extended paths. - check('//./c:', '\\\\.\\c:', '', []) - check('//?/c:/', '\\\\?\\c:', '\\', []) - check('//?/c:/a', '\\\\?\\c:', '\\', ['a']) - # Extended UNC paths (format is "\\?\UNC\server\share"). - check('//?', '\\\\?', '', []) - check('//?/', '\\\\?\\', '', []) - check('//?/UNC', '\\\\?\\UNC', '', []) - check('//?/UNC/', '\\\\?\\UNC\\', '', []) - check('//?/UNC/b', '\\\\?\\UNC\\b', '', []) - check('//?/UNC/b/', '\\\\?\\UNC\\b\\', '', []) - check('//?/UNC/b/c', '\\\\?\\UNC\\b\\c', '\\', []) - check('//?/UNC/b/c/', '\\\\?\\UNC\\b\\c', '\\', []) - check('//?/UNC/b/c/d', '\\\\?\\UNC\\b\\c', '\\', ['d']) - # UNC device paths - check('//./BootPartition/', '\\\\.\\BootPartition', '\\', []) - check('//?/BootPartition/', '\\\\?\\BootPartition', '\\', []) - check('//./PhysicalDrive0', '\\\\.\\PhysicalDrive0', '', []) - check('//?/Volume{}/', '\\\\?\\Volume{}', '\\', []) - check('//./nul', '\\\\.\\nul', '', []) - # Paths to files with NTFS alternate data streams - check('./c:s', '', '', ['c:s']) - check('cc:s', '', '', ['cc:s']) - check('C:c:s', 'C:', '', ['c:s']) - check('C:/c:s', 'C:', '\\', ['c:s']) - check('D:a/c:b', 'D:', '', ['a', 'c:b']) - check('D:/a/c:b', 'D:', '\\', ['a', 'c:b']) - - def test_str(self): - p = self.cls('a/b/c') - self.assertEqual(str(p), 'a\\b\\c') - p = self.cls('c:/a/b/c') - self.assertEqual(str(p), 'c:\\a\\b\\c') - p = self.cls('//a/b') - self.assertEqual(str(p), '\\\\a\\b\\') - p = self.cls('//a/b/c') - self.assertEqual(str(p), '\\\\a\\b\\c') - p = self.cls('//a/b/c/d') - self.assertEqual(str(p), '\\\\a\\b\\c\\d') - - def test_str_subclass(self): - self._check_str_subclass('.\\a:b') - self._check_str_subclass('c:') - self._check_str_subclass('c:a') - self._check_str_subclass('c:a\\b.txt') - self._check_str_subclass('c:\\') - self._check_str_subclass('c:\\a') - self._check_str_subclass('c:\\a\\b.txt') - self._check_str_subclass('\\\\some\\share') - self._check_str_subclass('\\\\some\\share\\a') - self._check_str_subclass('\\\\some\\share\\a\\b.txt') - - def test_eq(self): - P = self.cls - self.assertEqual(P('c:a/b'), P('c:a/b')) - 
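# Aside: the Windows parsing rules exercised above, shown directly on
# PureWindowsPath. A drive without a root ('c:a') stays drive-relative, a UNC
# server/share pair forms the drive, and a leading './' keeps an NTFS
# data-stream name from being read as a drive letter.
from pathlib import PureWindowsPath as P
assert (P('c:a').drive, P('c:a').root) == ('c:', '')
assert (P('c:/a').drive, P('c:/a').root) == ('c:', '\\')
assert P('//server/share/x').drive == '\\\\server\\share'
assert str(P('c:/a/b')) == 'c:\\a\\b'
assert P('./c:s').drive == ''            # data stream, not drive 'c:'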
self.assertEqual(P('c:a/b'), P('c:', 'a', 'b')) - self.assertNotEqual(P('c:a/b'), P('d:a/b')) - self.assertNotEqual(P('c:a/b'), P('c:/a/b')) - self.assertNotEqual(P('/a/b'), P('c:/a/b')) - # Case-insensitivity. - self.assertEqual(P('a/B'), P('A/b')) - self.assertEqual(P('C:a/B'), P('c:A/b')) - self.assertEqual(P('//Some/SHARE/a/B'), P('//somE/share/A/b')) - self.assertEqual(P('\u0130'), P('i\u0307')) - - def test_as_uri(self): - P = self.cls - with self.assertRaises(ValueError): - P('/a/b').as_uri() - with self.assertRaises(ValueError): - P('c:a/b').as_uri() - self.assertEqual(P('c:/').as_uri(), 'file:///c:/') - self.assertEqual(P('c:/a/b.c').as_uri(), 'file:///c:/a/b.c') - self.assertEqual(P('c:/a/b%#c').as_uri(), 'file:///c:/a/b%25%23c') - self.assertEqual(P('c:/a/b\xe9').as_uri(), 'file:///c:/a/b%C3%A9') - self.assertEqual(P('//some/share/').as_uri(), 'file://some/share/') - self.assertEqual(P('//some/share/a/b.c').as_uri(), - 'file://some/share/a/b.c') - self.assertEqual(P('//some/share/a/b%#c\xe9').as_uri(), - 'file://some/share/a/b%25%23c%C3%A9') - - def test_match(self): - P = self.cls - # Absolute patterns. - self.assertTrue(P('c:/b.py').match('*:/*.py')) - self.assertTrue(P('c:/b.py').match('c:/*.py')) - self.assertFalse(P('d:/b.py').match('c:/*.py')) # wrong drive - self.assertFalse(P('b.py').match('/*.py')) - self.assertFalse(P('b.py').match('c:*.py')) - self.assertFalse(P('b.py').match('c:/*.py')) - self.assertFalse(P('c:b.py').match('/*.py')) - self.assertFalse(P('c:b.py').match('c:/*.py')) - self.assertFalse(P('/b.py').match('c:*.py')) - self.assertFalse(P('/b.py').match('c:/*.py')) - # UNC patterns. - self.assertTrue(P('//some/share/a.py').match('//*/*/*.py')) - self.assertTrue(P('//some/share/a.py').match('//some/share/*.py')) - self.assertFalse(P('//other/share/a.py').match('//some/share/*.py')) - self.assertFalse(P('//some/share/a/b.py').match('//some/share/*.py')) - # Case-insensitivity. - self.assertTrue(P('B.py').match('b.PY')) - self.assertTrue(P('c:/a/B.Py').match('C:/A/*.pY')) - self.assertTrue(P('//Some/Share/B.Py').match('//somE/sharE/*.pY')) - # Path anchor doesn't match pattern anchor - self.assertFalse(P('c:/b.py').match('/*.py')) # 'c:/' vs '/' - self.assertFalse(P('c:/b.py').match('c:*.py')) # 'c:/' vs 'c:' - self.assertFalse(P('//some/share/a.py').match('/*.py')) # '//some/share/' vs '/' - - def test_ordering_common(self): - # Case-insensitivity. 
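# Aside: the case-insensitivity shown above, stand-alone. PureWindowsPath
# compares case-insensitively (including the drive and the UNC share), and the
# same rule extends to match() and to the ordering comparisons that follow.
from pathlib import PureWindowsPath as P
assert P('C:a/B') == P('c:A/b')
assert P('//Some/SHARE/a/B') == P('//somE/share/A/b')
assert P('c:/a/B.Py').match('C:/A/*.pY')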
- def assertOrderedEqual(a, b): - self.assertLessEqual(a, b) - self.assertGreaterEqual(b, a) - P = self.cls - p = P('c:A/b') - q = P('C:a/B') - assertOrderedEqual(p, q) - self.assertFalse(p < q) - self.assertFalse(p > q) - p = P('//some/Share/A/b') - q = P('//Some/SHARE/a/B') - assertOrderedEqual(p, q) - self.assertFalse(p < q) - self.assertFalse(p > q) - - def test_parts(self): - P = self.cls - p = P('c:a/b') - parts = p.parts - self.assertEqual(parts, ('c:', 'a', 'b')) - p = P('c:/a/b') - parts = p.parts - self.assertEqual(parts, ('c:\\', 'a', 'b')) - p = P('//a/b/c/d') - parts = p.parts - self.assertEqual(parts, ('\\\\a\\b\\', 'c', 'd')) - - def test_parent(self): - # Anchored - P = self.cls - p = P('z:a/b/c') - self.assertEqual(p.parent, P('z:a/b')) - self.assertEqual(p.parent.parent, P('z:a')) - self.assertEqual(p.parent.parent.parent, P('z:')) - self.assertEqual(p.parent.parent.parent.parent, P('z:')) - p = P('z:/a/b/c') - self.assertEqual(p.parent, P('z:/a/b')) - self.assertEqual(p.parent.parent, P('z:/a')) - self.assertEqual(p.parent.parent.parent, P('z:/')) - self.assertEqual(p.parent.parent.parent.parent, P('z:/')) - p = P('//a/b/c/d') - self.assertEqual(p.parent, P('//a/b/c')) - self.assertEqual(p.parent.parent, P('//a/b')) - self.assertEqual(p.parent.parent.parent, P('//a/b')) - - def test_parents(self): - # Anchored - P = self.cls - p = P('z:a/b/') - par = p.parents - self.assertEqual(len(par), 2) - self.assertEqual(par[0], P('z:a')) - self.assertEqual(par[1], P('z:')) - self.assertEqual(par[0:1], (P('z:a'),)) - self.assertEqual(par[:-1], (P('z:a'),)) - self.assertEqual(par[:2], (P('z:a'), P('z:'))) - self.assertEqual(par[1:], (P('z:'),)) - self.assertEqual(par[::2], (P('z:a'),)) - self.assertEqual(par[::-1], (P('z:'), P('z:a'))) - self.assertEqual(list(par), [P('z:a'), P('z:')]) - with self.assertRaises(IndexError): - par[2] - p = P('z:/a/b/') - par = p.parents - self.assertEqual(len(par), 2) - self.assertEqual(par[0], P('z:/a')) - self.assertEqual(par[1], P('z:/')) - self.assertEqual(par[0:1], (P('z:/a'),)) - self.assertEqual(par[0:-1], (P('z:/a'),)) - self.assertEqual(par[:2], (P('z:/a'), P('z:/'))) - self.assertEqual(par[1:], (P('z:/'),)) - self.assertEqual(par[::2], (P('z:/a'),)) - self.assertEqual(par[::-1], (P('z:/'), P('z:/a'),)) - self.assertEqual(list(par), [P('z:/a'), P('z:/')]) - with self.assertRaises(IndexError): - par[2] - p = P('//a/b/c/d') - par = p.parents - self.assertEqual(len(par), 2) - self.assertEqual(par[0], P('//a/b/c')) - self.assertEqual(par[1], P('//a/b')) - self.assertEqual(par[0:1], (P('//a/b/c'),)) - self.assertEqual(par[0:-1], (P('//a/b/c'),)) - self.assertEqual(par[:2], (P('//a/b/c'), P('//a/b'))) - self.assertEqual(par[1:], (P('//a/b'),)) - self.assertEqual(par[::2], (P('//a/b/c'),)) - self.assertEqual(par[::-1], (P('//a/b'), P('//a/b/c'))) - self.assertEqual(list(par), [P('//a/b/c'), P('//a/b')]) - with self.assertRaises(IndexError): - par[2] - - def test_drive(self): - P = self.cls - self.assertEqual(P('c:').drive, 'c:') - self.assertEqual(P('c:a/b').drive, 'c:') - self.assertEqual(P('c:/').drive, 'c:') - self.assertEqual(P('c:/a/b/').drive, 'c:') - self.assertEqual(P('//a/b').drive, '\\\\a\\b') - self.assertEqual(P('//a/b/').drive, '\\\\a\\b') - self.assertEqual(P('//a/b/c/d').drive, '\\\\a\\b') - self.assertEqual(P('./c:a').drive, '') - - def test_root(self): - P = self.cls - self.assertEqual(P('c:').root, '') - self.assertEqual(P('c:a/b').root, '') - self.assertEqual(P('c:/').root, '\\') - self.assertEqual(P('c:/a/b/').root, '\\') - 
self.assertEqual(P('//a/b').root, '\\') - self.assertEqual(P('//a/b/').root, '\\') - self.assertEqual(P('//a/b/c/d').root, '\\') - - def test_anchor(self): - P = self.cls - self.assertEqual(P('c:').anchor, 'c:') - self.assertEqual(P('c:a/b').anchor, 'c:') - self.assertEqual(P('c:/').anchor, 'c:\\') - self.assertEqual(P('c:/a/b/').anchor, 'c:\\') - self.assertEqual(P('//a/b').anchor, '\\\\a\\b\\') - self.assertEqual(P('//a/b/').anchor, '\\\\a\\b\\') - self.assertEqual(P('//a/b/c/d').anchor, '\\\\a\\b\\') - - def test_name(self): - P = self.cls - self.assertEqual(P('c:').name, '') - self.assertEqual(P('c:/').name, '') - self.assertEqual(P('c:a/b').name, 'b') - self.assertEqual(P('c:/a/b').name, 'b') - self.assertEqual(P('c:a/b.py').name, 'b.py') - self.assertEqual(P('c:/a/b.py').name, 'b.py') - self.assertEqual(P('//My.py/Share.php').name, '') - self.assertEqual(P('//My.py/Share.php/a/b').name, 'b') - - def test_suffix(self): - P = self.cls - self.assertEqual(P('c:').suffix, '') - self.assertEqual(P('c:/').suffix, '') - self.assertEqual(P('c:a/b').suffix, '') - self.assertEqual(P('c:/a/b').suffix, '') - self.assertEqual(P('c:a/b.py').suffix, '.py') - self.assertEqual(P('c:/a/b.py').suffix, '.py') - self.assertEqual(P('c:a/.hgrc').suffix, '') - self.assertEqual(P('c:/a/.hgrc').suffix, '') - self.assertEqual(P('c:a/.hg.rc').suffix, '.rc') - self.assertEqual(P('c:/a/.hg.rc').suffix, '.rc') - self.assertEqual(P('c:a/b.tar.gz').suffix, '.gz') - self.assertEqual(P('c:/a/b.tar.gz').suffix, '.gz') - self.assertEqual(P('c:a/Some name. Ending with a dot.').suffix, '') - self.assertEqual(P('c:/a/Some name. Ending with a dot.').suffix, '') - self.assertEqual(P('//My.py/Share.php').suffix, '') - self.assertEqual(P('//My.py/Share.php/a/b').suffix, '') - - def test_suffixes(self): - P = self.cls - self.assertEqual(P('c:').suffixes, []) - self.assertEqual(P('c:/').suffixes, []) - self.assertEqual(P('c:a/b').suffixes, []) - self.assertEqual(P('c:/a/b').suffixes, []) - self.assertEqual(P('c:a/b.py').suffixes, ['.py']) - self.assertEqual(P('c:/a/b.py').suffixes, ['.py']) - self.assertEqual(P('c:a/.hgrc').suffixes, []) - self.assertEqual(P('c:/a/.hgrc').suffixes, []) - self.assertEqual(P('c:a/.hg.rc').suffixes, ['.rc']) - self.assertEqual(P('c:/a/.hg.rc').suffixes, ['.rc']) - self.assertEqual(P('c:a/b.tar.gz').suffixes, ['.tar', '.gz']) - self.assertEqual(P('c:/a/b.tar.gz').suffixes, ['.tar', '.gz']) - self.assertEqual(P('//My.py/Share.php').suffixes, []) - self.assertEqual(P('//My.py/Share.php/a/b').suffixes, []) - self.assertEqual(P('c:a/Some name. Ending with a dot.').suffixes, []) - self.assertEqual(P('c:/a/Some name. Ending with a dot.').suffixes, []) - - def test_stem(self): - P = self.cls - self.assertEqual(P('c:').stem, '') - self.assertEqual(P('c:.').stem, '') - self.assertEqual(P('c:..').stem, '..') - self.assertEqual(P('c:/').stem, '') - self.assertEqual(P('c:a/b').stem, 'b') - self.assertEqual(P('c:a/b.py').stem, 'b') - self.assertEqual(P('c:a/.hgrc').stem, '.hgrc') - self.assertEqual(P('c:a/.hg.rc').stem, '.hg') - self.assertEqual(P('c:a/b.tar.gz').stem, 'b.tar') - self.assertEqual(P('c:a/Some name. Ending with a dot.').stem, - 'Some name. 
Ending with a dot.') - - def test_with_name(self): - P = self.cls - self.assertEqual(P('c:a/b').with_name('d.xml'), P('c:a/d.xml')) - self.assertEqual(P('c:/a/b').with_name('d.xml'), P('c:/a/d.xml')) - self.assertEqual(P('c:a/Dot ending.').with_name('d.xml'), P('c:a/d.xml')) - self.assertEqual(P('c:/a/Dot ending.').with_name('d.xml'), P('c:/a/d.xml')) - self.assertRaises(ValueError, P('c:').with_name, 'd.xml') - self.assertRaises(ValueError, P('c:/').with_name, 'd.xml') - self.assertRaises(ValueError, P('//My/Share').with_name, 'd.xml') - self.assertEqual(str(P('a').with_name('d:')), '.\\d:') - self.assertEqual(str(P('a').with_name('d:e')), '.\\d:e') - self.assertEqual(P('c:a/b').with_name('d:'), P('c:a/d:')) - self.assertEqual(P('c:a/b').with_name('d:e'), P('c:a/d:e')) - self.assertRaises(ValueError, P('c:a/b').with_name, 'd:/e') - self.assertRaises(ValueError, P('c:a/b').with_name, '//My/Share') - - def test_with_stem(self): - P = self.cls - self.assertEqual(P('c:a/b').with_stem('d'), P('c:a/d')) - self.assertEqual(P('c:/a/b').with_stem('d'), P('c:/a/d')) - self.assertEqual(P('c:a/Dot ending.').with_stem('d'), P('c:a/d')) - self.assertEqual(P('c:/a/Dot ending.').with_stem('d'), P('c:/a/d')) - self.assertRaises(ValueError, P('c:').with_stem, 'd') - self.assertRaises(ValueError, P('c:/').with_stem, 'd') - self.assertRaises(ValueError, P('//My/Share').with_stem, 'd') - self.assertEqual(str(P('a').with_stem('d:')), '.\\d:') - self.assertEqual(str(P('a').with_stem('d:e')), '.\\d:e') - self.assertEqual(P('c:a/b').with_stem('d:'), P('c:a/d:')) - self.assertEqual(P('c:a/b').with_stem('d:e'), P('c:a/d:e')) - self.assertRaises(ValueError, P('c:a/b').with_stem, 'd:/e') - self.assertRaises(ValueError, P('c:a/b').with_stem, '//My/Share') - - def test_with_suffix(self): - P = self.cls - self.assertEqual(P('c:a/b').with_suffix('.gz'), P('c:a/b.gz')) - self.assertEqual(P('c:/a/b').with_suffix('.gz'), P('c:/a/b.gz')) - self.assertEqual(P('c:a/b.py').with_suffix('.gz'), P('c:a/b.gz')) - self.assertEqual(P('c:/a/b.py').with_suffix('.gz'), P('c:/a/b.gz')) - # Path doesn't have a "filename" component. - self.assertRaises(ValueError, P('').with_suffix, '.gz') - self.assertRaises(ValueError, P('.').with_suffix, '.gz') - self.assertRaises(ValueError, P('/').with_suffix, '.gz') - self.assertRaises(ValueError, P('//My/Share').with_suffix, '.gz') - # Invalid suffix. 
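# Aside: the subtle case above, stand-alone (behaviour as asserted by these
# tests against the pathlib under development here). Replacing the name with
# something that looks like a drive ('d:') is allowed, but str() gains a '.\'
# prefix so the result is not re-parsed as drive-relative; rooted and UNC
# replacement names are rejected. The invalid with_suffix() arguments are
# enumerated next.
from pathlib import PureWindowsPath as P
assert str(P('a').with_name('d:')) == '.\\d:'
assert P('c:a/b').with_name('d:e') == P('c:a/d:e')
try:
    P('c:a/b').with_name('d:/e')
except ValueError:
    pass  # a rooted replacement name is not allowed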
- self.assertRaises(ValueError, P('c:a/b').with_suffix, 'gz') - self.assertRaises(ValueError, P('c:a/b').with_suffix, '/') - self.assertRaises(ValueError, P('c:a/b').with_suffix, '\\') - self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c:') - self.assertRaises(ValueError, P('c:a/b').with_suffix, '/.gz') - self.assertRaises(ValueError, P('c:a/b').with_suffix, '\\.gz') - self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c:.gz') - self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c/d') - self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c\\d') - self.assertRaises(ValueError, P('c:a/b').with_suffix, '.c/d') - self.assertRaises(ValueError, P('c:a/b').with_suffix, '.c\\d') - - def test_relative_to(self): - P = self.cls - p = P('C:Foo/Bar') - self.assertEqual(p.relative_to(P('c:')), P('Foo/Bar')) - self.assertEqual(p.relative_to('c:'), P('Foo/Bar')) - self.assertEqual(p.relative_to(P('c:foO')), P('Bar')) - self.assertEqual(p.relative_to('c:foO'), P('Bar')) - self.assertEqual(p.relative_to('c:foO/'), P('Bar')) - self.assertEqual(p.relative_to(P('c:foO/baR')), P()) - self.assertEqual(p.relative_to('c:foO/baR'), P()) - self.assertEqual(p.relative_to(P('c:'), walk_up=True), P('Foo/Bar')) - self.assertEqual(p.relative_to('c:', walk_up=True), P('Foo/Bar')) - self.assertEqual(p.relative_to(P('c:foO'), walk_up=True), P('Bar')) - self.assertEqual(p.relative_to('c:foO', walk_up=True), P('Bar')) - self.assertEqual(p.relative_to('c:foO/', walk_up=True), P('Bar')) - self.assertEqual(p.relative_to(P('c:foO/baR'), walk_up=True), P()) - self.assertEqual(p.relative_to('c:foO/baR', walk_up=True), P()) - self.assertEqual(p.relative_to(P('C:Foo/Bar/Baz'), walk_up=True), P('..')) - self.assertEqual(p.relative_to(P('C:Foo/Baz'), walk_up=True), P('../Bar')) - self.assertEqual(p.relative_to(P('C:Baz/Bar'), walk_up=True), P('../../Foo/Bar')) - # Unrelated paths. 
- self.assertRaises(ValueError, p.relative_to, P()) - self.assertRaises(ValueError, p.relative_to, '') - self.assertRaises(ValueError, p.relative_to, P('d:')) - self.assertRaises(ValueError, p.relative_to, P('/')) - self.assertRaises(ValueError, p.relative_to, P('Foo')) - self.assertRaises(ValueError, p.relative_to, P('/Foo')) - self.assertRaises(ValueError, p.relative_to, P('C:/Foo')) - self.assertRaises(ValueError, p.relative_to, P('C:Foo/Bar/Baz')) - self.assertRaises(ValueError, p.relative_to, P('C:Foo/Baz')) - self.assertRaises(ValueError, p.relative_to, P(), walk_up=True) - self.assertRaises(ValueError, p.relative_to, '', walk_up=True) - self.assertRaises(ValueError, p.relative_to, P('d:'), walk_up=True) - self.assertRaises(ValueError, p.relative_to, P('/'), walk_up=True) - self.assertRaises(ValueError, p.relative_to, P('Foo'), walk_up=True) - self.assertRaises(ValueError, p.relative_to, P('/Foo'), walk_up=True) - self.assertRaises(ValueError, p.relative_to, P('C:/Foo'), walk_up=True) - p = P('C:/Foo/Bar') - self.assertEqual(p.relative_to(P('c:/')), P('Foo/Bar')) - self.assertEqual(p.relative_to('c:/'), P('Foo/Bar')) - self.assertEqual(p.relative_to(P('c:/foO')), P('Bar')) - self.assertEqual(p.relative_to('c:/foO'), P('Bar')) - self.assertEqual(p.relative_to('c:/foO/'), P('Bar')) - self.assertEqual(p.relative_to(P('c:/foO/baR')), P()) - self.assertEqual(p.relative_to('c:/foO/baR'), P()) - self.assertEqual(p.relative_to(P('c:/'), walk_up=True), P('Foo/Bar')) - self.assertEqual(p.relative_to('c:/', walk_up=True), P('Foo/Bar')) - self.assertEqual(p.relative_to(P('c:/foO'), walk_up=True), P('Bar')) - self.assertEqual(p.relative_to('c:/foO', walk_up=True), P('Bar')) - self.assertEqual(p.relative_to('c:/foO/', walk_up=True), P('Bar')) - self.assertEqual(p.relative_to(P('c:/foO/baR'), walk_up=True), P()) - self.assertEqual(p.relative_to('c:/foO/baR', walk_up=True), P()) - self.assertEqual(p.relative_to('C:/Baz', walk_up=True), P('../Foo/Bar')) - self.assertEqual(p.relative_to('C:/Foo/Bar/Baz', walk_up=True), P('..')) - self.assertEqual(p.relative_to('C:/Foo/Baz', walk_up=True), P('../Bar')) - # Unrelated paths. - self.assertRaises(ValueError, p.relative_to, 'c:') - self.assertRaises(ValueError, p.relative_to, P('c:')) - self.assertRaises(ValueError, p.relative_to, P('C:/Baz')) - self.assertRaises(ValueError, p.relative_to, P('C:/Foo/Bar/Baz')) - self.assertRaises(ValueError, p.relative_to, P('C:/Foo/Baz')) - self.assertRaises(ValueError, p.relative_to, P('C:Foo')) - self.assertRaises(ValueError, p.relative_to, P('d:')) - self.assertRaises(ValueError, p.relative_to, P('d:/')) - self.assertRaises(ValueError, p.relative_to, P('/')) - self.assertRaises(ValueError, p.relative_to, P('/Foo')) - self.assertRaises(ValueError, p.relative_to, P('//C/Foo')) - self.assertRaises(ValueError, p.relative_to, 'c:', walk_up=True) - self.assertRaises(ValueError, p.relative_to, P('c:'), walk_up=True) - self.assertRaises(ValueError, p.relative_to, P('C:Foo'), walk_up=True) - self.assertRaises(ValueError, p.relative_to, P('d:'), walk_up=True) - self.assertRaises(ValueError, p.relative_to, P('d:/'), walk_up=True) - self.assertRaises(ValueError, p.relative_to, P('/'), walk_up=True) - self.assertRaises(ValueError, p.relative_to, P('/Foo'), walk_up=True) - self.assertRaises(ValueError, p.relative_to, P('//C/Foo'), walk_up=True) - # UNC paths. 
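# Aside: the drive-aware behaviour above, in isolation. relative_to() matches
# case-insensitively on Windows, and walk_up=True still refuses to cross a
# drive boundary. The UNC-share cases follow.
from pathlib import PureWindowsPath as P
assert P('C:Foo/Bar').relative_to('c:foO') == P('Bar')
assert P('C:/Foo/Bar').relative_to('C:/Baz', walk_up=True) == P('../Foo/Bar')
try:
    P('C:/Foo/Bar').relative_to(P('d:'), walk_up=True)
except ValueError:
    pass  # different drive: never relative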
- p = P('//Server/Share/Foo/Bar') - self.assertEqual(p.relative_to(P('//sErver/sHare')), P('Foo/Bar')) - self.assertEqual(p.relative_to('//sErver/sHare'), P('Foo/Bar')) - self.assertEqual(p.relative_to('//sErver/sHare/'), P('Foo/Bar')) - self.assertEqual(p.relative_to(P('//sErver/sHare/Foo')), P('Bar')) - self.assertEqual(p.relative_to('//sErver/sHare/Foo'), P('Bar')) - self.assertEqual(p.relative_to('//sErver/sHare/Foo/'), P('Bar')) - self.assertEqual(p.relative_to(P('//sErver/sHare/Foo/Bar')), P()) - self.assertEqual(p.relative_to('//sErver/sHare/Foo/Bar'), P()) - self.assertEqual(p.relative_to(P('//sErver/sHare'), walk_up=True), P('Foo/Bar')) - self.assertEqual(p.relative_to('//sErver/sHare', walk_up=True), P('Foo/Bar')) - self.assertEqual(p.relative_to('//sErver/sHare/', walk_up=True), P('Foo/Bar')) - self.assertEqual(p.relative_to(P('//sErver/sHare/Foo'), walk_up=True), P('Bar')) - self.assertEqual(p.relative_to('//sErver/sHare/Foo', walk_up=True), P('Bar')) - self.assertEqual(p.relative_to('//sErver/sHare/Foo/', walk_up=True), P('Bar')) - self.assertEqual(p.relative_to(P('//sErver/sHare/Foo/Bar'), walk_up=True), P()) - self.assertEqual(p.relative_to('//sErver/sHare/Foo/Bar', walk_up=True), P()) - self.assertEqual(p.relative_to(P('//sErver/sHare/bar'), walk_up=True), P('../Foo/Bar')) - self.assertEqual(p.relative_to('//sErver/sHare/bar', walk_up=True), P('../Foo/Bar')) - # Unrelated paths. - self.assertRaises(ValueError, p.relative_to, P('/Server/Share/Foo')) - self.assertRaises(ValueError, p.relative_to, P('c:/Server/Share/Foo')) - self.assertRaises(ValueError, p.relative_to, P('//z/Share/Foo')) - self.assertRaises(ValueError, p.relative_to, P('//Server/z/Foo')) - self.assertRaises(ValueError, p.relative_to, P('/Server/Share/Foo'), walk_up=True) - self.assertRaises(ValueError, p.relative_to, P('c:/Server/Share/Foo'), walk_up=True) - self.assertRaises(ValueError, p.relative_to, P('//z/Share/Foo'), walk_up=True) - self.assertRaises(ValueError, p.relative_to, P('//Server/z/Foo'), walk_up=True) - - def test_is_relative_to(self): - P = self.cls - p = P('C:Foo/Bar') - self.assertTrue(p.is_relative_to(P('c:'))) - self.assertTrue(p.is_relative_to('c:')) - self.assertTrue(p.is_relative_to(P('c:foO'))) - self.assertTrue(p.is_relative_to('c:foO')) - self.assertTrue(p.is_relative_to('c:foO/')) - self.assertTrue(p.is_relative_to(P('c:foO/baR'))) - self.assertTrue(p.is_relative_to('c:foO/baR')) - # Unrelated paths. - self.assertFalse(p.is_relative_to(P())) - self.assertFalse(p.is_relative_to('')) - self.assertFalse(p.is_relative_to(P('d:'))) - self.assertFalse(p.is_relative_to(P('/'))) - self.assertFalse(p.is_relative_to(P('Foo'))) - self.assertFalse(p.is_relative_to(P('/Foo'))) - self.assertFalse(p.is_relative_to(P('C:/Foo'))) - self.assertFalse(p.is_relative_to(P('C:Foo/Bar/Baz'))) - self.assertFalse(p.is_relative_to(P('C:Foo/Baz'))) - p = P('C:/Foo/Bar') - self.assertTrue(p.is_relative_to(P('c:/'))) - self.assertTrue(p.is_relative_to(P('c:/foO'))) - self.assertTrue(p.is_relative_to('c:/foO/')) - self.assertTrue(p.is_relative_to(P('c:/foO/baR'))) - self.assertTrue(p.is_relative_to('c:/foO/baR')) - # Unrelated paths. 
- self.assertFalse(p.is_relative_to('c:')) - self.assertFalse(p.is_relative_to(P('C:/Baz'))) - self.assertFalse(p.is_relative_to(P('C:/Foo/Bar/Baz'))) - self.assertFalse(p.is_relative_to(P('C:/Foo/Baz'))) - self.assertFalse(p.is_relative_to(P('C:Foo'))) - self.assertFalse(p.is_relative_to(P('d:'))) - self.assertFalse(p.is_relative_to(P('d:/'))) - self.assertFalse(p.is_relative_to(P('/'))) - self.assertFalse(p.is_relative_to(P('/Foo'))) - self.assertFalse(p.is_relative_to(P('//C/Foo'))) - # UNC paths. - p = P('//Server/Share/Foo/Bar') - self.assertTrue(p.is_relative_to(P('//sErver/sHare'))) - self.assertTrue(p.is_relative_to('//sErver/sHare')) - self.assertTrue(p.is_relative_to('//sErver/sHare/')) - self.assertTrue(p.is_relative_to(P('//sErver/sHare/Foo'))) - self.assertTrue(p.is_relative_to('//sErver/sHare/Foo')) - self.assertTrue(p.is_relative_to('//sErver/sHare/Foo/')) - self.assertTrue(p.is_relative_to(P('//sErver/sHare/Foo/Bar'))) - self.assertTrue(p.is_relative_to('//sErver/sHare/Foo/Bar')) - # Unrelated paths. - self.assertFalse(p.is_relative_to(P('/Server/Share/Foo'))) - self.assertFalse(p.is_relative_to(P('c:/Server/Share/Foo'))) - self.assertFalse(p.is_relative_to(P('//z/Share/Foo'))) - self.assertFalse(p.is_relative_to(P('//Server/z/Foo'))) - - def test_is_absolute(self): - P = self.cls - # Under NT, only paths with both a drive and a root are absolute. - self.assertFalse(P().is_absolute()) - self.assertFalse(P('a').is_absolute()) - self.assertFalse(P('a/b/').is_absolute()) - self.assertFalse(P('/').is_absolute()) - self.assertFalse(P('/a').is_absolute()) - self.assertFalse(P('/a/b/').is_absolute()) - self.assertFalse(P('c:').is_absolute()) - self.assertFalse(P('c:a').is_absolute()) - self.assertFalse(P('c:a/b/').is_absolute()) - self.assertTrue(P('c:/').is_absolute()) - self.assertTrue(P('c:/a').is_absolute()) - self.assertTrue(P('c:/a/b/').is_absolute()) - # UNC paths are absolute by definition. - self.assertTrue(P('//a/b').is_absolute()) - self.assertTrue(P('//a/b/').is_absolute()) - self.assertTrue(P('//a/b/c').is_absolute()) - self.assertTrue(P('//a/b/c/d').is_absolute()) - - def test_join(self): - P = self.cls - p = P('C:/a/b') - pp = p.joinpath('x/y') - self.assertEqual(pp, P('C:/a/b/x/y')) - pp = p.joinpath('/x/y') - self.assertEqual(pp, P('C:/x/y')) - # Joining with a different drive => the first path is ignored, even - # if the second path is relative. - pp = p.joinpath('D:x/y') - self.assertEqual(pp, P('D:x/y')) - pp = p.joinpath('D:/x/y') - self.assertEqual(pp, P('D:/x/y')) - pp = p.joinpath('//host/share/x/y') - self.assertEqual(pp, P('//host/share/x/y')) - # Joining with the same drive => the first path is appended to if - # the second path is relative. - pp = p.joinpath('c:x/y') - self.assertEqual(pp, P('C:/a/b/x/y')) - pp = p.joinpath('c:/x/y') - self.assertEqual(pp, P('C:/x/y')) - # Joining with files with NTFS data streams => the filename should - # not be parsed as a drive letter - pp = p.joinpath(P('./d:s')) - self.assertEqual(pp, P('C:/a/b/d:s')) - pp = p.joinpath(P('./dd:s')) - self.assertEqual(pp, P('C:/a/b/dd:s')) - pp = p.joinpath(P('E:d:s')) - self.assertEqual(pp, P('E:d:s')) - # Joining onto a UNC path with no root - pp = P('//').joinpath('server') - self.assertEqual(pp, P('//server')) - pp = P('//server').joinpath('share') - self.assertEqual(pp, P('//server/share')) - pp = P('//./BootPartition').joinpath('Windows') - self.assertEqual(pp, P('//./BootPartition/Windows')) - - def test_div(self): - # Basically the same as joinpath(). 
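# Aside: the NT absoluteness and joining rules above, in isolation. A path is
# absolute only with both a drive and a root; joining a path that names a
# different drive discards the left-hand side. The '/' operator tested next
# follows the same rules as joinpath().
from pathlib import PureWindowsPath as P
assert not P('/a').is_absolute() and not P('c:a').is_absolute()
assert P('c:/a').is_absolute() and P('//host/share').is_absolute()
assert P('C:/a/b').joinpath('D:x/y') == P('D:x/y')       # drive change resets
assert P('C:/a/b').joinpath('c:x/y') == P('C:/a/b/x/y')  # same drive, relative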
- P = self.cls - p = P('C:/a/b') - self.assertEqual(p / 'x/y', P('C:/a/b/x/y')) - self.assertEqual(p / 'x' / 'y', P('C:/a/b/x/y')) - self.assertEqual(p / '/x/y', P('C:/x/y')) - self.assertEqual(p / '/x' / 'y', P('C:/x/y')) - # Joining with a different drive => the first path is ignored, even - # if the second path is relative. - self.assertEqual(p / 'D:x/y', P('D:x/y')) - self.assertEqual(p / 'D:' / 'x/y', P('D:x/y')) - self.assertEqual(p / 'D:/x/y', P('D:/x/y')) - self.assertEqual(p / 'D:' / '/x/y', P('D:/x/y')) - self.assertEqual(p / '//host/share/x/y', P('//host/share/x/y')) - # Joining with the same drive => the first path is appended to if - # the second path is relative. - self.assertEqual(p / 'c:x/y', P('C:/a/b/x/y')) - self.assertEqual(p / 'c:/x/y', P('C:/x/y')) - # Joining with files with NTFS data streams => the filename should - # not be parsed as a drive letter - self.assertEqual(p / P('./d:s'), P('C:/a/b/d:s')) - self.assertEqual(p / P('./dd:s'), P('C:/a/b/dd:s')) - self.assertEqual(p / P('E:d:s'), P('E:d:s')) - - def test_is_reserved(self): - P = self.cls - self.assertIs(False, P('').is_reserved()) - self.assertIs(False, P('/').is_reserved()) - self.assertIs(False, P('/foo/bar').is_reserved()) - # UNC paths are never reserved. - self.assertIs(False, P('//my/share/nul/con/aux').is_reserved()) - # Case-insensitive DOS-device names are reserved. - self.assertIs(True, P('nul').is_reserved()) - self.assertIs(True, P('aux').is_reserved()) - self.assertIs(True, P('prn').is_reserved()) - self.assertIs(True, P('con').is_reserved()) - self.assertIs(True, P('conin$').is_reserved()) - self.assertIs(True, P('conout$').is_reserved()) - # COM/LPT + 1-9 or + superscript 1-3 are reserved. - self.assertIs(True, P('COM1').is_reserved()) - self.assertIs(True, P('LPT9').is_reserved()) - self.assertIs(True, P('com\xb9').is_reserved()) - self.assertIs(True, P('com\xb2').is_reserved()) - self.assertIs(True, P('lpt\xb3').is_reserved()) - # DOS-device name mataching ignores characters after a dot or - # a colon and also ignores trailing spaces. - self.assertIs(True, P('NUL.txt').is_reserved()) - self.assertIs(True, P('PRN ').is_reserved()) - self.assertIs(True, P('AUX .txt').is_reserved()) - self.assertIs(True, P('COM1:bar').is_reserved()) - self.assertIs(True, P('LPT9 :bar').is_reserved()) - # DOS-device names are only matched at the beginning - # of a path component. - self.assertIs(False, P('bar.com9').is_reserved()) - self.assertIs(False, P('bar.lpt9').is_reserved()) - # Only the last path component matters. - self.assertIs(True, P('c:/baz/con/NUL').is_reserved()) - self.assertIs(False, P('c:/NUL/con/baz').is_reserved()) - - -class PurePathSubclassTest(PurePathTest): - class cls(pathlib.PurePath): - pass - - # repr() roundtripping is not supported in custom subclass. - test_repr_roundtrips = None - - -# -# Tests for the virtual classes. 
-# - -class PathBaseTest(PurePathBaseTest): - cls = pathlib._abc.PathBase - - def test_unsupported_operation(self): - P = self.cls - p = self.cls() - e = pathlib.UnsupportedOperation - self.assertRaises(e, p.stat) - self.assertRaises(e, p.lstat) - self.assertRaises(e, p.exists) - self.assertRaises(e, p.samefile, 'foo') - self.assertRaises(e, p.is_dir) - self.assertRaises(e, p.is_file) - self.assertRaises(e, p.is_mount) - self.assertRaises(e, p.is_symlink) - self.assertRaises(e, p.is_block_device) - self.assertRaises(e, p.is_char_device) - self.assertRaises(e, p.is_fifo) - self.assertRaises(e, p.is_socket) - self.assertRaises(e, p.open) - self.assertRaises(e, p.read_bytes) - self.assertRaises(e, p.read_text) - self.assertRaises(e, p.write_bytes, b'foo') - self.assertRaises(e, p.write_text, 'foo') - self.assertRaises(e, p.iterdir) - self.assertRaises(e, p.glob, '*') - self.assertRaises(e, p.rglob, '*') - self.assertRaises(e, lambda: list(p.walk())) - self.assertRaises(e, p.absolute) - self.assertRaises(e, P.cwd) - self.assertRaises(e, p.expanduser) - self.assertRaises(e, p.home) - self.assertRaises(e, p.readlink) - self.assertRaises(e, p.symlink_to, 'foo') - self.assertRaises(e, p.hardlink_to, 'foo') - self.assertRaises(e, p.mkdir) - self.assertRaises(e, p.touch) - self.assertRaises(e, p.rename, 'foo') - self.assertRaises(e, p.replace, 'foo') - self.assertRaises(e, p.chmod, 0o755) - self.assertRaises(e, p.lchmod, 0o755) - self.assertRaises(e, p.unlink) - self.assertRaises(e, p.rmdir) - self.assertRaises(e, p.owner) - self.assertRaises(e, p.group) - self.assertRaises(e, p.as_uri) - - def test_as_uri_common(self): - e = pathlib.UnsupportedOperation - self.assertRaises(e, self.cls().as_uri) - - def test_fspath_common(self): - self.assertRaises(TypeError, os.fspath, self.cls()) - - def test_as_bytes_common(self): - self.assertRaises(TypeError, bytes, self.cls()) - - def test_matches_path_api(self): - our_names = {name for name in dir(self.cls) if name[0] != '_'} - path_names = {name for name in dir(pathlib.Path) if name[0] != '_'} - self.assertEqual(our_names, path_names) - for attr_name in our_names: - our_attr = getattr(self.cls, attr_name) - path_attr = getattr(pathlib.Path, attr_name) - self.assertEqual(our_attr.__doc__, path_attr.__doc__) - - -class DummyPathIO(io.BytesIO): - """ - Used by DummyPath to implement `open('w')` - """ - - def __init__(self, files, path): - super().__init__() - self.files = files - self.path = path - - def close(self): - self.files[self.path] = self.getvalue() - super().close() - - -class DummyPath(pathlib._abc.PathBase): - """ - Simple implementation of PathBase that keeps files and directories in - memory. 
- """ - _files = {} - _directories = {} - _symlinks = {} - - def __eq__(self, other): - if not isinstance(other, DummyPath): - return NotImplemented - return str(self) == str(other) - - def __hash__(self): - return hash(str(self)) - - def stat(self, *, follow_symlinks=True): - if follow_symlinks: - path = str(self.resolve()) - else: - path = str(self.parent.resolve() / self.name) - if path in self._files: - st_mode = stat.S_IFREG - elif path in self._directories: - st_mode = stat.S_IFDIR - elif path in self._symlinks: - st_mode = stat.S_IFLNK - else: - raise FileNotFoundError(errno.ENOENT, "Not found", str(self)) - return os.stat_result((st_mode, hash(str(self)), 0, 0, 0, 0, 0, 0, 0, 0)) - - def open(self, mode='r', buffering=-1, encoding=None, - errors=None, newline=None): - if buffering != -1: - raise NotImplementedError - path_obj = self.resolve() - path = str(path_obj) - name = path_obj.name - parent = str(path_obj.parent) - if path in self._directories: - raise IsADirectoryError(errno.EISDIR, "Is a directory", path) - - text = 'b' not in mode - mode = ''.join(c for c in mode if c not in 'btU') - if mode == 'r': - if path not in self._files: - raise FileNotFoundError(errno.ENOENT, "File not found", path) - stream = io.BytesIO(self._files[path]) - elif mode == 'w': - if parent not in self._directories: - raise FileNotFoundError(errno.ENOENT, "File not found", parent) - stream = DummyPathIO(self._files, path) - self._files[path] = b'' - self._directories[parent].add(name) - else: - raise NotImplementedError - if text: - stream = io.TextIOWrapper(stream, encoding=encoding, errors=errors, newline=newline) - return stream - - def iterdir(self): - path = str(self.resolve()) - if path in self._files: - raise NotADirectoryError(errno.ENOTDIR, "Not a directory", path) - elif path in self._directories: - return (self / name for name in self._directories[path]) - else: - raise FileNotFoundError(errno.ENOENT, "File not found", path) - - def mkdir(self, mode=0o777, parents=False, exist_ok=False): - path = str(self.resolve()) - if path in self._directories: - if exist_ok: - return - else: - raise FileExistsError(errno.EEXIST, "File exists", path) - try: - if self.name: - self._directories[str(self.parent)].add(self.name) - self._directories[path] = set() - except KeyError: - if not parents: - raise FileNotFoundError(errno.ENOENT, "File not found", str(self.parent)) from None - self.parent.mkdir(parents=True, exist_ok=True) - self.mkdir(mode, parents=False, exist_ok=exist_ok) - - -class DummyPathTest(DummyPurePathTest): - """Tests for PathBase methods that use stat(), open() and iterdir().""" - - cls = DummyPath - can_symlink = False - - # (BASE) - # | - # |-- brokenLink -> non-existing - # |-- dirA - # | `-- linkC -> ../dirB - # |-- dirB - # | |-- fileB - # | `-- linkD -> ../dirB - # |-- dirC - # | |-- dirD - # | | `-- fileD - # | `-- fileC - # | `-- novel.txt - # |-- dirE # No permissions - # |-- fileA - # |-- linkA -> fileA - # |-- linkB -> dirB - # `-- brokenLinkLoop -> brokenLinkLoop - # - - def setUp(self): - super().setUp() - pathmod = self.cls.pathmod - p = self.cls(BASE) - p.mkdir(parents=True) - p.joinpath('dirA').mkdir() - p.joinpath('dirB').mkdir() - p.joinpath('dirC').mkdir() - p.joinpath('dirC', 'dirD').mkdir() - p.joinpath('dirE').mkdir() - with p.joinpath('fileA').open('wb') as f: - f.write(b"this is file A\n") - with p.joinpath('dirB', 'fileB').open('wb') as f: - f.write(b"this is file B\n") - with p.joinpath('dirC', 'fileC').open('wb') as f: - f.write(b"this is file C\n") - with 
p.joinpath('dirC', 'novel.txt').open('wb') as f: - f.write(b"this is a novel\n") - with p.joinpath('dirC', 'dirD', 'fileD').open('wb') as f: - f.write(b"this is file D\n") - if self.can_symlink: - p.joinpath('linkA').symlink_to('fileA') - p.joinpath('brokenLink').symlink_to('non-existing') - p.joinpath('linkB').symlink_to('dirB') - p.joinpath('dirA', 'linkC').symlink_to(pathmod.join('..', 'dirB')) - p.joinpath('dirB', 'linkD').symlink_to(pathmod.join('..', 'dirB')) - p.joinpath('brokenLinkLoop').symlink_to('brokenLinkLoop') - - def tearDown(self): - cls = self.cls - cls._files.clear() - cls._directories.clear() - cls._symlinks.clear() - - def tempdir(self): - path = self.cls(BASE).with_name('tmp-dirD') - path.mkdir() - return path - - def assertFileNotFound(self, func, *args, **kwargs): - with self.assertRaises(FileNotFoundError) as cm: - func(*args, **kwargs) - self.assertEqual(cm.exception.errno, errno.ENOENT) - - def assertEqualNormCase(self, path_a, path_b): - self.assertEqual(os.path.normcase(path_a), os.path.normcase(path_b)) - - def test_samefile(self): - fileA_path = os.path.join(BASE, 'fileA') - fileB_path = os.path.join(BASE, 'dirB', 'fileB') - p = self.cls(fileA_path) - pp = self.cls(fileA_path) - q = self.cls(fileB_path) - self.assertTrue(p.samefile(fileA_path)) - self.assertTrue(p.samefile(pp)) - self.assertFalse(p.samefile(fileB_path)) - self.assertFalse(p.samefile(q)) - # Test the non-existent file case - non_existent = os.path.join(BASE, 'foo') - r = self.cls(non_existent) - self.assertRaises(FileNotFoundError, p.samefile, r) - self.assertRaises(FileNotFoundError, p.samefile, non_existent) - self.assertRaises(FileNotFoundError, r.samefile, p) - self.assertRaises(FileNotFoundError, r.samefile, non_existent) - self.assertRaises(FileNotFoundError, r.samefile, r) - self.assertRaises(FileNotFoundError, r.samefile, non_existent) - - def test_empty_path(self): - # The empty path points to '.' - p = self.cls('') - self.assertEqual(str(p), '.') - - def test_exists(self): - P = self.cls - p = P(BASE) - self.assertIs(True, p.exists()) - self.assertIs(True, (p / 'dirA').exists()) - self.assertIs(True, (p / 'fileA').exists()) - self.assertIs(False, (p / 'fileA' / 'bah').exists()) - if self.can_symlink: - self.assertIs(True, (p / 'linkA').exists()) - self.assertIs(True, (p / 'linkB').exists()) - self.assertIs(True, (p / 'linkB' / 'fileB').exists()) - self.assertIs(False, (p / 'linkA' / 'bah').exists()) - self.assertIs(False, (p / 'brokenLink').exists()) - self.assertIs(True, (p / 'brokenLink').exists(follow_symlinks=False)) - self.assertIs(False, (p / 'foo').exists()) - self.assertIs(False, P('/xyzzy').exists()) - self.assertIs(False, P(BASE + '\udfff').exists()) - self.assertIs(False, P(BASE + '\x00').exists()) - - def test_open_common(self): - p = self.cls(BASE) - with (p / 'fileA').open('r') as f: - self.assertIsInstance(f, io.TextIOBase) - self.assertEqual(f.read(), "this is file A\n") - with (p / 'fileA').open('rb') as f: - self.assertIsInstance(f, io.BufferedIOBase) - self.assertEqual(f.read().strip(), b"this is file A") - - def test_read_write_bytes(self): - p = self.cls(BASE) - (p / 'fileA').write_bytes(b'abcdefg') - self.assertEqual((p / 'fileA').read_bytes(), b'abcdefg') - # Check that trying to write str does not truncate the file. 
- self.assertRaises(TypeError, (p / 'fileA').write_bytes, 'somestr') - self.assertEqual((p / 'fileA').read_bytes(), b'abcdefg') - - def test_read_write_text(self): - p = self.cls(BASE) - (p / 'fileA').write_text('äbcdefg', encoding='latin-1') - self.assertEqual((p / 'fileA').read_text( - encoding='utf-8', errors='ignore'), 'bcdefg') - # Check that trying to write bytes does not truncate the file. - self.assertRaises(TypeError, (p / 'fileA').write_text, b'somebytes') - self.assertEqual((p / 'fileA').read_text(encoding='latin-1'), 'äbcdefg') - - def test_read_text_with_newlines(self): - p = self.cls(BASE) - # Check that `\n` character change nothing - (p / 'fileA').write_bytes(b'abcde\r\nfghlk\n\rmnopq') - self.assertEqual((p / 'fileA').read_text(newline='\n'), - 'abcde\r\nfghlk\n\rmnopq') - # Check that `\r` character replaces `\n` - (p / 'fileA').write_bytes(b'abcde\r\nfghlk\n\rmnopq') - self.assertEqual((p / 'fileA').read_text(newline='\r'), - 'abcde\r\nfghlk\n\rmnopq') - # Check that `\r\n` character replaces `\n` - (p / 'fileA').write_bytes(b'abcde\r\nfghlk\n\rmnopq') - self.assertEqual((p / 'fileA').read_text(newline='\r\n'), - 'abcde\r\nfghlk\n\rmnopq') - - def test_write_text_with_newlines(self): - p = self.cls(BASE) - # Check that `\n` character change nothing - (p / 'fileA').write_text('abcde\r\nfghlk\n\rmnopq', newline='\n') - self.assertEqual((p / 'fileA').read_bytes(), - b'abcde\r\nfghlk\n\rmnopq') - # Check that `\r` character replaces `\n` - (p / 'fileA').write_text('abcde\r\nfghlk\n\rmnopq', newline='\r') - self.assertEqual((p / 'fileA').read_bytes(), - b'abcde\r\rfghlk\r\rmnopq') - # Check that `\r\n` character replaces `\n` - (p / 'fileA').write_text('abcde\r\nfghlk\n\rmnopq', newline='\r\n') - self.assertEqual((p / 'fileA').read_bytes(), - b'abcde\r\r\nfghlk\r\n\rmnopq') - # Check that no argument passed will change `\n` to `os.linesep` - os_linesep_byte = bytes(os.linesep, encoding='ascii') - (p / 'fileA').write_text('abcde\nfghlk\n\rmnopq') - self.assertEqual((p / 'fileA').read_bytes(), - b'abcde' + os_linesep_byte + b'fghlk' + os_linesep_byte + b'\rmnopq') - - def test_iterdir(self): - P = self.cls - p = P(BASE) - it = p.iterdir() - paths = set(it) - expected = ['dirA', 'dirB', 'dirC', 'dirE', 'fileA'] - if self.can_symlink: - expected += ['linkA', 'linkB', 'brokenLink', 'brokenLinkLoop'] - self.assertEqual(paths, { P(BASE, q) for q in expected }) - - def test_iterdir_symlink(self): - if not self.can_symlink: - self.skipTest("symlinks required") - # __iter__ on a symlink to a directory. - P = self.cls - p = P(BASE, 'linkB') - paths = set(p.iterdir()) - expected = { P(BASE, 'linkB', q) for q in ['fileB', 'linkD'] } - self.assertEqual(paths, expected) - - def test_iterdir_nodir(self): - # __iter__ on something that is not a directory. - p = self.cls(BASE, 'fileA') - with self.assertRaises(OSError) as cm: - p.iterdir() - # ENOENT or EINVAL under Windows, ENOTDIR otherwise - # (see issue #12802). 
- self.assertIn(cm.exception.errno, (errno.ENOTDIR, - errno.ENOENT, errno.EINVAL)) - - def test_glob_common(self): - def _check(glob, expected): - self.assertEqual(set(glob), { P(BASE, q) for q in expected }) - P = self.cls - p = P(BASE) - it = p.glob("fileA") - self.assertIsInstance(it, collections.abc.Iterator) - _check(it, ["fileA"]) - _check(p.glob("fileB"), []) - _check(p.glob("dir*/file*"), ["dirB/fileB", "dirC/fileC"]) - if not self.can_symlink: - _check(p.glob("*A"), ['dirA', 'fileA']) - else: - _check(p.glob("*A"), ['dirA', 'fileA', 'linkA']) - if not self.can_symlink: - _check(p.glob("*B/*"), ['dirB/fileB']) - else: - _check(p.glob("*B/*"), ['dirB/fileB', 'dirB/linkD', - 'linkB/fileB', 'linkB/linkD']) - if not self.can_symlink: - _check(p.glob("*/fileB"), ['dirB/fileB']) - else: - _check(p.glob("*/fileB"), ['dirB/fileB', 'linkB/fileB']) - if self.can_symlink: - _check(p.glob("brokenLink"), ['brokenLink']) - - if not self.can_symlink: - _check(p.glob("*/"), ["dirA/", "dirB/", "dirC/", "dirE/"]) - else: - _check(p.glob("*/"), ["dirA/", "dirB/", "dirC/", "dirE/", "linkB/"]) - - def test_glob_empty_pattern(self): - p = self.cls() - with self.assertRaisesRegex(ValueError, 'Unacceptable pattern'): - list(p.glob('')) - - def test_glob_case_sensitive(self): - P = self.cls - def _check(path, pattern, case_sensitive, expected): - actual = {str(q) for q in path.glob(pattern, case_sensitive=case_sensitive)} - expected = {str(P(BASE, q)) for q in expected} - self.assertEqual(actual, expected) - path = P(BASE) - _check(path, "DIRB/FILE*", True, []) - _check(path, "DIRB/FILE*", False, ["dirB/fileB"]) - _check(path, "dirb/file*", True, []) - _check(path, "dirb/file*", False, ["dirB/fileB"]) - - def test_glob_follow_symlinks_common(self): - if not self.can_symlink: - self.skipTest("symlinks required") - def _check(path, glob, expected): - actual = {path for path in path.glob(glob, follow_symlinks=True) - if "linkD" not in path.parent.parts} # exclude symlink loop. 
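# A minimal, self-contained sketch of the glob() behaviour exercised by the
# tests above (illustrative only, not part of the fixture tree). Assumes
# Python 3.12+, where Path.glob() accepts case_sensitive=...; patterns ending
# in "/" have matched only directories since 3.11.
import pathlib
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    root = pathlib.Path(tmp)
    (root / "dirB").mkdir()
    (root / "dirB" / "fileB").write_text("this is file B\n")
    (root / "fileA").write_text("this is file A\n")

    # A trailing slash restricts matches to directories.
    assert {p.name for p in root.glob("*/")} == {"dirB"}

    # case_sensitive=False relaxes wildcard matching regardless of the
    # platform's default casing rules.
    assert [p.name.lower() for p in root.glob("DIR*/FILE*", case_sensitive=False)] == ["fileb"]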
- self.assertEqual(actual, { P(BASE, q) for q in expected }) - P = self.cls - p = P(BASE) - _check(p, "fileB", []) - _check(p, "dir*/file*", ["dirB/fileB", "dirC/fileC"]) - _check(p, "*A", ["dirA", "fileA", "linkA"]) - _check(p, "*B/*", ["dirB/fileB", "dirB/linkD", "linkB/fileB", "linkB/linkD"]) - _check(p, "*/fileB", ["dirB/fileB", "linkB/fileB"]) - _check(p, "*/", ["dirA/", "dirB/", "dirC/", "dirE/", "linkB/"]) - _check(p, "dir*/*/..", ["dirC/dirD/..", "dirA/linkC/.."]) - _check(p, "dir*/**/", ["dirA/", "dirA/linkC/", "dirA/linkC/linkD/", "dirB/", "dirB/linkD/", - "dirC/", "dirC/dirD/", "dirE/"]) - _check(p, "dir*/**/..", ["dirA/..", "dirA/linkC/..", "dirB/..", - "dirC/..", "dirC/dirD/..", "dirE/.."]) - _check(p, "dir*/*/**/", ["dirA/linkC/", "dirA/linkC/linkD/", "dirB/linkD/", "dirC/dirD/"]) - _check(p, "dir*/*/**/..", ["dirA/linkC/..", "dirC/dirD/.."]) - _check(p, "dir*/**/fileC", ["dirC/fileC"]) - _check(p, "dir*/*/../dirD/**/", ["dirC/dirD/../dirD/"]) - _check(p, "*/dirD/**/", ["dirC/dirD/"]) - - def test_glob_no_follow_symlinks_common(self): - if not self.can_symlink: - self.skipTest("symlinks required") - def _check(path, glob, expected): - actual = {path for path in path.glob(glob, follow_symlinks=False)} - self.assertEqual(actual, { P(BASE, q) for q in expected }) - P = self.cls - p = P(BASE) - _check(p, "fileB", []) - _check(p, "dir*/file*", ["dirB/fileB", "dirC/fileC"]) - _check(p, "*A", ["dirA", "fileA", "linkA"]) - _check(p, "*B/*", ["dirB/fileB", "dirB/linkD"]) - _check(p, "*/fileB", ["dirB/fileB"]) - _check(p, "*/", ["dirA/", "dirB/", "dirC/", "dirE/"]) - _check(p, "dir*/*/..", ["dirC/dirD/.."]) - _check(p, "dir*/**/", ["dirA/", "dirB/", "dirC/", "dirC/dirD/", "dirE/"]) - _check(p, "dir*/**/..", ["dirA/..", "dirB/..", "dirC/..", "dirC/dirD/..", "dirE/.."]) - _check(p, "dir*/*/**/", ["dirC/dirD/"]) - _check(p, "dir*/*/**/..", ["dirC/dirD/.."]) - _check(p, "dir*/**/fileC", ["dirC/fileC"]) - _check(p, "dir*/*/../dirD/**/", ["dirC/dirD/../dirD/"]) - _check(p, "*/dirD/**/", ["dirC/dirD/"]) - - def test_rglob_common(self): - def _check(glob, expected): - self.assertEqual(set(glob), {P(BASE, q) for q in expected}) - P = self.cls - p = P(BASE) - it = p.rglob("fileA") - self.assertIsInstance(it, collections.abc.Iterator) - _check(it, ["fileA"]) - _check(p.rglob("fileB"), ["dirB/fileB"]) - _check(p.rglob("**/fileB"), ["dirB/fileB"]) - _check(p.rglob("*/fileA"), []) - if not self.can_symlink: - _check(p.rglob("*/fileB"), ["dirB/fileB"]) - else: - _check(p.rglob("*/fileB"), ["dirB/fileB", "dirB/linkD/fileB", - "linkB/fileB", "dirA/linkC/fileB"]) - _check(p.rglob("file*"), ["fileA", "dirB/fileB", - "dirC/fileC", "dirC/dirD/fileD"]) - if not self.can_symlink: - _check(p.rglob("*/"), [ - "dirA/", "dirB/", "dirC/", "dirC/dirD/", "dirE/", - ]) - else: - _check(p.rglob("*/"), [ - "dirA/", "dirA/linkC/", "dirB/", "dirB/linkD/", "dirC/", - "dirC/dirD/", "dirE/", "linkB/", - ]) - _check(p.rglob(""), ["./", "dirA/", "dirB/", "dirC/", "dirE/", "dirC/dirD/"]) - - p = P(BASE, "dirC") - _check(p.rglob("*"), ["dirC/fileC", "dirC/novel.txt", - "dirC/dirD", "dirC/dirD/fileD"]) - _check(p.rglob("file*"), ["dirC/fileC", "dirC/dirD/fileD"]) - _check(p.rglob("**/file*"), ["dirC/fileC", "dirC/dirD/fileD"]) - _check(p.rglob("dir*/**/"), ["dirC/dirD/"]) - _check(p.rglob("*/*"), ["dirC/dirD/fileD"]) - _check(p.rglob("*/"), ["dirC/dirD/"]) - _check(p.rglob(""), ["dirC/", "dirC/dirD/"]) - _check(p.rglob("**/"), ["dirC/", "dirC/dirD/"]) - # gh-91616, a re module regression - _check(p.rglob("*.txt"), 
["dirC/novel.txt"]) - _check(p.rglob("*.*"), ["dirC/novel.txt"]) - - def test_rglob_follow_symlinks_common(self): - if not self.can_symlink: - self.skipTest("symlinks required") - def _check(path, glob, expected): - actual = {path for path in path.rglob(glob, follow_symlinks=True) - if 'linkD' not in path.parent.parts} # exclude symlink loop. - self.assertEqual(actual, { P(BASE, q) for q in expected }) - P = self.cls - p = P(BASE) - _check(p, "fileB", ["dirB/fileB", "dirA/linkC/fileB", "linkB/fileB"]) - _check(p, "*/fileA", []) - _check(p, "*/fileB", ["dirB/fileB", "dirA/linkC/fileB", "linkB/fileB"]) - _check(p, "file*", ["fileA", "dirA/linkC/fileB", "dirB/fileB", - "dirC/fileC", "dirC/dirD/fileD", "linkB/fileB"]) - _check(p, "*/", ["dirA/", "dirA/linkC/", "dirA/linkC/linkD/", "dirB/", "dirB/linkD/", - "dirC/", "dirC/dirD/", "dirE/", "linkB/", "linkB/linkD/"]) - _check(p, "", ["./", "dirA/", "dirA/linkC/", "dirA/linkC/linkD/", "dirB/", "dirB/linkD/", - "dirC/", "dirE/", "dirC/dirD/", "linkB/", "linkB/linkD/"]) - - p = P(BASE, "dirC") - _check(p, "*", ["dirC/fileC", "dirC/novel.txt", - "dirC/dirD", "dirC/dirD/fileD"]) - _check(p, "file*", ["dirC/fileC", "dirC/dirD/fileD"]) - _check(p, "*/*", ["dirC/dirD/fileD"]) - _check(p, "*/", ["dirC/dirD/"]) - _check(p, "", ["dirC/", "dirC/dirD/"]) - # gh-91616, a re module regression - _check(p, "*.txt", ["dirC/novel.txt"]) - _check(p, "*.*", ["dirC/novel.txt"]) - - def test_rglob_no_follow_symlinks_common(self): - if not self.can_symlink: - self.skipTest("symlinks required") - def _check(path, glob, expected): - actual = {path for path in path.rglob(glob, follow_symlinks=False)} - self.assertEqual(actual, { P(BASE, q) for q in expected }) - P = self.cls - p = P(BASE) - _check(p, "fileB", ["dirB/fileB"]) - _check(p, "*/fileA", []) - _check(p, "*/fileB", ["dirB/fileB"]) - _check(p, "file*", ["fileA", "dirB/fileB", "dirC/fileC", "dirC/dirD/fileD", ]) - _check(p, "*/", ["dirA/", "dirB/", "dirC/", "dirC/dirD/", "dirE/"]) - _check(p, "", ["./", "dirA/", "dirB/", "dirC/", "dirE/", "dirC/dirD/"]) - - p = P(BASE, "dirC") - _check(p, "*", ["dirC/fileC", "dirC/novel.txt", - "dirC/dirD", "dirC/dirD/fileD"]) - _check(p, "file*", ["dirC/fileC", "dirC/dirD/fileD"]) - _check(p, "*/*", ["dirC/dirD/fileD"]) - _check(p, "*/", ["dirC/dirD/"]) - _check(p, "", ["dirC/", "dirC/dirD/"]) - # gh-91616, a re module regression - _check(p, "*.txt", ["dirC/novel.txt"]) - _check(p, "*.*", ["dirC/novel.txt"]) - - def test_rglob_symlink_loop(self): - # Don't get fooled by symlink loops (Issue #26012). - if not self.can_symlink: - self.skipTest("symlinks required") - P = self.cls - p = P(BASE) - given = set(p.rglob('*')) - expect = {'brokenLink', - 'dirA', 'dirA/linkC', - 'dirB', 'dirB/fileB', 'dirB/linkD', - 'dirC', 'dirC/dirD', 'dirC/dirD/fileD', - 'dirC/fileC', 'dirC/novel.txt', - 'dirE', - 'fileA', - 'linkA', - 'linkB', - 'brokenLinkLoop', - } - self.assertEqual(given, {p / x for x in expect}) - - def test_glob_many_open_files(self): - depth = 30 - P = self.cls - p = base = P(BASE) / 'deep' - p.mkdir() - for _ in range(depth): - p /= 'd' - p.mkdir() - pattern = '/'.join(['*'] * depth) - iters = [base.glob(pattern) for j in range(100)] - for it in iters: - self.assertEqual(next(it), p) - iters = [base.rglob('d') for j in range(100)] - p = base - for i in range(depth): - p = p / 'd' - for it in iters: - self.assertEqual(next(it), p) - - def test_glob_dotdot(self): - # ".." is not special in globs. 
- P = self.cls - p = P(BASE) - self.assertEqual(set(p.glob("..")), { P(BASE, "..") }) - self.assertEqual(set(p.glob("../..")), { P(BASE, "..", "..") }) - self.assertEqual(set(p.glob("dirA/..")), { P(BASE, "dirA", "..") }) - self.assertEqual(set(p.glob("dirA/../file*")), { P(BASE, "dirA/../fileA") }) - self.assertEqual(set(p.glob("dirA/../file*/..")), set()) - self.assertEqual(set(p.glob("../xyzzy")), set()) - self.assertEqual(set(p.glob("xyzzy/..")), set()) - self.assertEqual(set(p.glob("/".join([".."] * 50))), { P(BASE, *[".."] * 50)}) - - def test_glob_permissions(self): - # See bpo-38894 - if not self.can_symlink: - self.skipTest("symlinks required") - P = self.cls - base = P(BASE) / 'permissions' - base.mkdir() - - for i in range(100): - link = base / f"link{i}" - if i % 2: - link.symlink_to(P(BASE, "dirE", "nonexistent")) - else: - link.symlink_to(P(BASE, "dirC")) - - self.assertEqual(len(set(base.glob("*"))), 100) - self.assertEqual(len(set(base.glob("*/"))), 50) - self.assertEqual(len(set(base.glob("*/fileC"))), 50) - self.assertEqual(len(set(base.glob("*/file*"))), 50) - - def test_glob_long_symlink(self): - # See gh-87695 - if not self.can_symlink: - self.skipTest("symlinks required") - base = self.cls(BASE) / 'long_symlink' - base.mkdir() - bad_link = base / 'bad_link' - bad_link.symlink_to("bad" * 200) - self.assertEqual(sorted(base.glob('**/*')), [bad_link]) - - def test_glob_above_recursion_limit(self): - recursion_limit = 50 - # directory_depth > recursion_limit - directory_depth = recursion_limit + 10 - base = self.cls(BASE, 'deep') - path = base.joinpath(*(['d'] * directory_depth)) - path.mkdir(parents=True) - - with set_recursion_limit(recursion_limit): - list(base.glob('**/')) - - def test_glob_recursive_no_trailing_slash(self): - P = self.cls - p = P(BASE) - with self.assertWarns(FutureWarning): - p.glob('**') - with self.assertWarns(FutureWarning): - p.glob('*/**') - with self.assertWarns(FutureWarning): - p.rglob('**') - with self.assertWarns(FutureWarning): - p.rglob('*/**') - - - def test_readlink(self): - if not self.can_symlink: - self.skipTest("symlinks required") - P = self.cls(BASE) - self.assertEqual((P / 'linkA').readlink(), self.cls('fileA')) - self.assertEqual((P / 'brokenLink').readlink(), - self.cls('non-existing')) - self.assertEqual((P / 'linkB').readlink(), self.cls('dirB')) - self.assertEqual((P / 'linkB' / 'linkD').readlink(), self.cls('../dirB')) - with self.assertRaises(OSError): - (P / 'fileA').readlink() - - @unittest.skipIf(hasattr(os, "readlink"), "os.readlink() is present") - def test_readlink_unsupported(self): - P = self.cls(BASE) - p = P / 'fileA' - with self.assertRaises(pathlib.UnsupportedOperation): - q.readlink(p) - - def _check_resolve(self, p, expected, strict=True): - q = p.resolve(strict) - self.assertEqual(q, expected) - - # This can be used to check both relative and absolute resolutions. 
- _check_resolve_relative = _check_resolve_absolute = _check_resolve - - def test_resolve_common(self): - if not self.can_symlink: - self.skipTest("symlinks required") - P = self.cls - p = P(BASE, 'foo') - with self.assertRaises(OSError) as cm: - p.resolve(strict=True) - self.assertEqual(cm.exception.errno, errno.ENOENT) - # Non-strict - self.assertEqualNormCase(str(p.resolve(strict=False)), - os.path.join(BASE, 'foo')) - p = P(BASE, 'foo', 'in', 'spam') - self.assertEqualNormCase(str(p.resolve(strict=False)), - os.path.join(BASE, 'foo', 'in', 'spam')) - p = P(BASE, '..', 'foo', 'in', 'spam') - self.assertEqualNormCase(str(p.resolve(strict=False)), - os.path.abspath(os.path.join('foo', 'in', 'spam'))) - # These are all relative symlinks. - p = P(BASE, 'dirB', 'fileB') - self._check_resolve_relative(p, p) - p = P(BASE, 'linkA') - self._check_resolve_relative(p, P(BASE, 'fileA')) - p = P(BASE, 'dirA', 'linkC', 'fileB') - self._check_resolve_relative(p, P(BASE, 'dirB', 'fileB')) - p = P(BASE, 'dirB', 'linkD', 'fileB') - self._check_resolve_relative(p, P(BASE, 'dirB', 'fileB')) - # Non-strict - p = P(BASE, 'dirA', 'linkC', 'fileB', 'foo', 'in', 'spam') - self._check_resolve_relative(p, P(BASE, 'dirB', 'fileB', 'foo', 'in', - 'spam'), False) - p = P(BASE, 'dirA', 'linkC', '..', 'foo', 'in', 'spam') - if os.name == 'nt' and isinstance(p, pathlib.Path): - # In Windows, if linkY points to dirB, 'dirA\linkY\..' - # resolves to 'dirA' without resolving linkY first. - self._check_resolve_relative(p, P(BASE, 'dirA', 'foo', 'in', - 'spam'), False) - else: - # In Posix, if linkY points to dirB, 'dirA/linkY/..' - # resolves to 'dirB/..' first before resolving to parent of dirB. - self._check_resolve_relative(p, P(BASE, 'foo', 'in', 'spam'), False) - # Now create absolute symlinks. - d = self.tempdir() - P(BASE, 'dirA', 'linkX').symlink_to(d) - P(BASE, str(d), 'linkY').symlink_to(join('dirB')) - p = P(BASE, 'dirA', 'linkX', 'linkY', 'fileB') - self._check_resolve_absolute(p, P(BASE, 'dirB', 'fileB')) - # Non-strict - p = P(BASE, 'dirA', 'linkX', 'linkY', 'foo', 'in', 'spam') - self._check_resolve_relative(p, P(BASE, 'dirB', 'foo', 'in', 'spam'), - False) - p = P(BASE, 'dirA', 'linkX', 'linkY', '..', 'foo', 'in', 'spam') - if os.name == 'nt' and isinstance(p, pathlib.Path): - # In Windows, if linkY points to dirB, 'dirA\linkY\..' - # resolves to 'dirA' without resolving linkY first. - self._check_resolve_relative(p, P(d, 'foo', 'in', 'spam'), False) - else: - # In Posix, if linkY points to dirB, 'dirA/linkY/..' - # resolves to 'dirB/..' first before resolving to parent of dirB. 
- self._check_resolve_relative(p, P(BASE, 'foo', 'in', 'spam'), False) - - def test_resolve_dot(self): - # See http://web.archive.org/web/20200623062557/https://bitbucket.org/pitrou/pathlib/issues/9/ - if not self.can_symlink: - self.skipTest("symlinks required") - p = self.cls(BASE) - p.joinpath('0').symlink_to('.', target_is_directory=True) - p.joinpath('1').symlink_to(os.path.join('0', '0'), target_is_directory=True) - p.joinpath('2').symlink_to(os.path.join('1', '1'), target_is_directory=True) - q = p / '2' - self.assertEqual(q.resolve(strict=True), p) - r = q / '3' / '4' - self.assertRaises(FileNotFoundError, r.resolve, strict=True) - # Non-strict - self.assertEqual(r.resolve(strict=False), p / '3' / '4') - - def _check_symlink_loop(self, *args): - path = self.cls(*args) - with self.assertRaises(OSError) as cm: - path.resolve(strict=True) - self.assertEqual(cm.exception.errno, errno.ELOOP) - - def test_resolve_loop(self): - if not self.can_symlink: - self.skipTest("symlinks required") - if os.name == 'nt' and issubclass(self.cls, pathlib.Path): - self.skipTest("symlink loops work differently with concrete Windows paths") - # Loops with relative symlinks. - self.cls(BASE, 'linkX').symlink_to('linkX/inside') - self._check_symlink_loop(BASE, 'linkX') - self.cls(BASE, 'linkY').symlink_to('linkY') - self._check_symlink_loop(BASE, 'linkY') - self.cls(BASE, 'linkZ').symlink_to('linkZ/../linkZ') - self._check_symlink_loop(BASE, 'linkZ') - # Non-strict - p = self.cls(BASE, 'linkZ', 'foo') - self.assertEqual(p.resolve(strict=False), p) - # Loops with absolute symlinks. - self.cls(BASE, 'linkU').symlink_to(join('linkU/inside')) - self._check_symlink_loop(BASE, 'linkU') - self.cls(BASE, 'linkV').symlink_to(join('linkV')) - self._check_symlink_loop(BASE, 'linkV') - self.cls(BASE, 'linkW').symlink_to(join('linkW/../linkW')) - self._check_symlink_loop(BASE, 'linkW') - # Non-strict - q = self.cls(BASE, 'linkW', 'foo') - self.assertEqual(q.resolve(strict=False), q) - - def test_stat(self): - statA = self.cls(BASE).joinpath('fileA').stat() - statB = self.cls(BASE).joinpath('dirB', 'fileB').stat() - statC = self.cls(BASE).joinpath('dirC').stat() - # st_mode: files are the same, directory differs. - self.assertIsInstance(statA.st_mode, int) - self.assertEqual(statA.st_mode, statB.st_mode) - self.assertNotEqual(statA.st_mode, statC.st_mode) - self.assertNotEqual(statB.st_mode, statC.st_mode) - # st_ino: all different, - self.assertIsInstance(statA.st_ino, int) - self.assertNotEqual(statA.st_ino, statB.st_ino) - self.assertNotEqual(statA.st_ino, statC.st_ino) - self.assertNotEqual(statB.st_ino, statC.st_ino) - # st_dev: all the same. - self.assertIsInstance(statA.st_dev, int) - self.assertEqual(statA.st_dev, statB.st_dev) - self.assertEqual(statA.st_dev, statC.st_dev) - # other attributes not used by pathlib. 
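# A short standalone illustration of the stat() fields compared above:
# st_mode encodes the file type, st_ino identifies the file, and st_dev
# identifies the containing filesystem. The paths are created ad hoc here
# and are not part of the test fixture.
import pathlib
import stat
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    root = pathlib.Path(tmp)
    (root / "fileA").write_text("this is file A\n")
    (root / "dirC").mkdir()

    st_file = (root / "fileA").stat()
    st_dir = (root / "dirC").stat()

    assert stat.S_ISREG(st_file.st_mode) and stat.S_ISDIR(st_dir.st_mode)
    assert st_file.st_ino != st_dir.st_ino   # distinct files -> distinct inodes
    assert st_file.st_dev == st_dir.st_dev   # both live on the same filesystem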
- - def test_stat_no_follow_symlinks(self): - if not self.can_symlink: - self.skipTest("symlinks required") - p = self.cls(BASE) / 'linkA' - st = p.stat() - self.assertNotEqual(st, p.stat(follow_symlinks=False)) - - def test_stat_no_follow_symlinks_nosymlink(self): - p = self.cls(BASE) / 'fileA' - st = p.stat() - self.assertEqual(st, p.stat(follow_symlinks=False)) - - def test_lstat(self): - if not self.can_symlink: - self.skipTest("symlinks required") - p = self.cls(BASE)/ 'linkA' - st = p.stat() - self.assertNotEqual(st, p.lstat()) - - def test_lstat_nosymlink(self): - p = self.cls(BASE) / 'fileA' - st = p.stat() - self.assertEqual(st, p.lstat()) - - def test_is_dir(self): - P = self.cls(BASE) - self.assertTrue((P / 'dirA').is_dir()) - self.assertFalse((P / 'fileA').is_dir()) - self.assertFalse((P / 'non-existing').is_dir()) - self.assertFalse((P / 'fileA' / 'bah').is_dir()) - if self.can_symlink: - self.assertFalse((P / 'linkA').is_dir()) - self.assertTrue((P / 'linkB').is_dir()) - self.assertFalse((P/ 'brokenLink').is_dir()) - self.assertFalse((P / 'dirA\udfff').is_dir()) - self.assertFalse((P / 'dirA\x00').is_dir()) - - def test_is_dir_no_follow_symlinks(self): - P = self.cls(BASE) - self.assertTrue((P / 'dirA').is_dir(follow_symlinks=False)) - self.assertFalse((P / 'fileA').is_dir(follow_symlinks=False)) - self.assertFalse((P / 'non-existing').is_dir(follow_symlinks=False)) - self.assertFalse((P / 'fileA' / 'bah').is_dir(follow_symlinks=False)) - if self.can_symlink: - self.assertFalse((P / 'linkA').is_dir(follow_symlinks=False)) - self.assertFalse((P / 'linkB').is_dir(follow_symlinks=False)) - self.assertFalse((P/ 'brokenLink').is_dir(follow_symlinks=False)) - self.assertFalse((P / 'dirA\udfff').is_dir(follow_symlinks=False)) - self.assertFalse((P / 'dirA\x00').is_dir(follow_symlinks=False)) - - def test_is_file(self): - P = self.cls(BASE) - self.assertTrue((P / 'fileA').is_file()) - self.assertFalse((P / 'dirA').is_file()) - self.assertFalse((P / 'non-existing').is_file()) - self.assertFalse((P / 'fileA' / 'bah').is_file()) - if self.can_symlink: - self.assertTrue((P / 'linkA').is_file()) - self.assertFalse((P / 'linkB').is_file()) - self.assertFalse((P/ 'brokenLink').is_file()) - self.assertFalse((P / 'fileA\udfff').is_file()) - self.assertFalse((P / 'fileA\x00').is_file()) - - def test_is_file_no_follow_symlinks(self): - P = self.cls(BASE) - self.assertTrue((P / 'fileA').is_file(follow_symlinks=False)) - self.assertFalse((P / 'dirA').is_file(follow_symlinks=False)) - self.assertFalse((P / 'non-existing').is_file(follow_symlinks=False)) - self.assertFalse((P / 'fileA' / 'bah').is_file(follow_symlinks=False)) - if self.can_symlink: - self.assertFalse((P / 'linkA').is_file(follow_symlinks=False)) - self.assertFalse((P / 'linkB').is_file(follow_symlinks=False)) - self.assertFalse((P/ 'brokenLink').is_file(follow_symlinks=False)) - self.assertFalse((P / 'fileA\udfff').is_file(follow_symlinks=False)) - self.assertFalse((P / 'fileA\x00').is_file(follow_symlinks=False)) - - def test_is_mount(self): - P = self.cls(BASE) - self.assertFalse((P / 'fileA').is_mount()) - self.assertFalse((P / 'dirA').is_mount()) - self.assertFalse((P / 'non-existing').is_mount()) - self.assertFalse((P / 'fileA' / 'bah').is_mount()) - if self.can_symlink: - self.assertFalse((P / 'linkA').is_mount()) - - def test_is_symlink(self): - P = self.cls(BASE) - self.assertFalse((P / 'fileA').is_symlink()) - self.assertFalse((P / 'dirA').is_symlink()) - self.assertFalse((P / 'non-existing').is_symlink()) - 
self.assertFalse((P / 'fileA' / 'bah').is_symlink()) - if self.can_symlink: - self.assertTrue((P / 'linkA').is_symlink()) - self.assertTrue((P / 'linkB').is_symlink()) - self.assertTrue((P/ 'brokenLink').is_symlink()) - self.assertIs((P / 'fileA\udfff').is_file(), False) - self.assertIs((P / 'fileA\x00').is_file(), False) - if self.can_symlink: - self.assertIs((P / 'linkA\udfff').is_file(), False) - self.assertIs((P / 'linkA\x00').is_file(), False) - - def test_is_junction_false(self): - P = self.cls(BASE) - self.assertFalse((P / 'fileA').is_junction()) - self.assertFalse((P / 'dirA').is_junction()) - self.assertFalse((P / 'non-existing').is_junction()) - self.assertFalse((P / 'fileA' / 'bah').is_junction()) - self.assertFalse((P / 'fileA\udfff').is_junction()) - self.assertFalse((P / 'fileA\x00').is_junction()) - - def test_is_fifo_false(self): - P = self.cls(BASE) - self.assertFalse((P / 'fileA').is_fifo()) - self.assertFalse((P / 'dirA').is_fifo()) - self.assertFalse((P / 'non-existing').is_fifo()) - self.assertFalse((P / 'fileA' / 'bah').is_fifo()) - self.assertIs((P / 'fileA\udfff').is_fifo(), False) - self.assertIs((P / 'fileA\x00').is_fifo(), False) - - def test_is_socket_false(self): - P = self.cls(BASE) - self.assertFalse((P / 'fileA').is_socket()) - self.assertFalse((P / 'dirA').is_socket()) - self.assertFalse((P / 'non-existing').is_socket()) - self.assertFalse((P / 'fileA' / 'bah').is_socket()) - self.assertIs((P / 'fileA\udfff').is_socket(), False) - self.assertIs((P / 'fileA\x00').is_socket(), False) - - def test_is_block_device_false(self): - P = self.cls(BASE) - self.assertFalse((P / 'fileA').is_block_device()) - self.assertFalse((P / 'dirA').is_block_device()) - self.assertFalse((P / 'non-existing').is_block_device()) - self.assertFalse((P / 'fileA' / 'bah').is_block_device()) - self.assertIs((P / 'fileA\udfff').is_block_device(), False) - self.assertIs((P / 'fileA\x00').is_block_device(), False) - - def test_is_char_device_false(self): - P = self.cls(BASE) - self.assertFalse((P / 'fileA').is_char_device()) - self.assertFalse((P / 'dirA').is_char_device()) - self.assertFalse((P / 'non-existing').is_char_device()) - self.assertFalse((P / 'fileA' / 'bah').is_char_device()) - self.assertIs((P / 'fileA\udfff').is_char_device(), False) - self.assertIs((P / 'fileA\x00').is_char_device(), False) - - def test_pickling_common(self): - p = self.cls(BASE, 'fileA') - for proto in range(0, pickle.HIGHEST_PROTOCOL + 1): - dumped = pickle.dumps(p, proto) - pp = pickle.loads(dumped) - self.assertEqual(pp.stat(), p.stat()) - - def test_parts_interning(self): - P = self.cls - p = P('/usr/bin/foo') - q = P('/usr/local/bin') - # 'usr' - self.assertIs(p.parts[1], q.parts[1]) - # 'bin' - self.assertIs(p.parts[2], q.parts[3]) - - def _check_complex_symlinks(self, link0_target): - if not self.can_symlink: - self.skipTest("symlinks required") - - # Test solving a non-looping chain of symlinks (issue #19887). - P = self.cls(BASE) - P.joinpath('link1').symlink_to(os.path.join('link0', 'link0'), target_is_directory=True) - P.joinpath('link2').symlink_to(os.path.join('link1', 'link1'), target_is_directory=True) - P.joinpath('link3').symlink_to(os.path.join('link2', 'link2'), target_is_directory=True) - P.joinpath('link0').symlink_to(link0_target, target_is_directory=True) - - # Resolve absolute paths. 
- p = (P / 'link0').resolve() - self.assertEqual(p, P) - self.assertEqualNormCase(str(p), BASE) - p = (P / 'link1').resolve() - self.assertEqual(p, P) - self.assertEqualNormCase(str(p), BASE) - p = (P / 'link2').resolve() - self.assertEqual(p, P) - self.assertEqualNormCase(str(p), BASE) - p = (P / 'link3').resolve() - self.assertEqual(p, P) - self.assertEqualNormCase(str(p), BASE) - - # Resolve relative paths. - try: - self.cls().absolute() - except pathlib.UnsupportedOperation: - return - old_path = os.getcwd() - os.chdir(BASE) - try: - p = self.cls('link0').resolve() - self.assertEqual(p, P) - self.assertEqualNormCase(str(p), BASE) - p = self.cls('link1').resolve() - self.assertEqual(p, P) - self.assertEqualNormCase(str(p), BASE) - p = self.cls('link2').resolve() - self.assertEqual(p, P) - self.assertEqualNormCase(str(p), BASE) - p = self.cls('link3').resolve() - self.assertEqual(p, P) - self.assertEqualNormCase(str(p), BASE) - finally: - os.chdir(old_path) - - def test_complex_symlinks_absolute(self): - self._check_complex_symlinks(BASE) - - def test_complex_symlinks_relative(self): - self._check_complex_symlinks('.') - - def test_complex_symlinks_relative_dot_dot(self): - self._check_complex_symlinks(os.path.join('dirA', '..')) - - def setUpWalk(self): - # Build: - # TESTFN/ - # TEST1/ a file kid and two directory kids - # tmp1 - # SUB1/ a file kid and a directory kid - # tmp2 - # SUB11/ no kids - # SUB2/ a file kid and a dirsymlink kid - # tmp3 - # link/ a symlink to TEST2 - # broken_link - # broken_link2 - # TEST2/ - # tmp4 a lone file - self.walk_path = self.cls(BASE, "TEST1") - self.sub1_path = self.walk_path / "SUB1" - self.sub11_path = self.sub1_path / "SUB11" - self.sub2_path = self.walk_path / "SUB2" - tmp1_path = self.walk_path / "tmp1" - tmp2_path = self.sub1_path / "tmp2" - tmp3_path = self.sub2_path / "tmp3" - self.link_path = self.sub2_path / "link" - t2_path = self.cls(BASE, "TEST2") - tmp4_path = self.cls(BASE, "TEST2", "tmp4") - broken_link_path = self.sub2_path / "broken_link" - broken_link2_path = self.sub2_path / "broken_link2" - - self.sub11_path.mkdir(parents=True) - self.sub2_path.mkdir(parents=True) - t2_path.mkdir(parents=True) - - for path in tmp1_path, tmp2_path, tmp3_path, tmp4_path: - with path.open("w", encoding='utf-8') as f: - f.write(f"I'm {path} and proud of it. Blame test_pathlib.\n") - - if self.can_symlink: - self.link_path.symlink_to(t2_path) - broken_link_path.symlink_to('broken') - broken_link2_path.symlink_to(self.cls('tmp3', 'broken')) - self.sub2_tree = (self.sub2_path, [], ["broken_link", "broken_link2", "link", "tmp3"]) - else: - self.sub2_tree = (self.sub2_path, [], ["tmp3"]) - - def test_walk_topdown(self): - self.setUpWalk() - walker = self.walk_path.walk() - entry = next(walker) - entry[1].sort() # Ensure we visit SUB1 before SUB2 - self.assertEqual(entry, (self.walk_path, ["SUB1", "SUB2"], ["tmp1"])) - entry = next(walker) - self.assertEqual(entry, (self.sub1_path, ["SUB11"], ["tmp2"])) - entry = next(walker) - self.assertEqual(entry, (self.sub11_path, [], [])) - entry = next(walker) - entry[1].sort() - entry[2].sort() - self.assertEqual(entry, self.sub2_tree) - with self.assertRaises(StopIteration): - next(walker) - - def test_walk_prune(self): - self.setUpWalk() - # Prune the search. - all = [] - for root, dirs, files in self.walk_path.walk(): - all.append((root, dirs, files)) - if 'SUB1' in dirs: - # Note that this also mutates the dirs we appended to all! 
- dirs.remove('SUB1') - - self.assertEqual(len(all), 2) - self.assertEqual(all[0], (self.walk_path, ["SUB2"], ["tmp1"])) - - all[1][-1].sort() - all[1][1].sort() - self.assertEqual(all[1], self.sub2_tree) - - def test_walk_bottom_up(self): - self.setUpWalk() - seen_testfn = seen_sub1 = seen_sub11 = seen_sub2 = False - for path, dirnames, filenames in self.walk_path.walk(top_down=False): - if path == self.walk_path: - self.assertFalse(seen_testfn) - self.assertTrue(seen_sub1) - self.assertTrue(seen_sub2) - self.assertEqual(sorted(dirnames), ["SUB1", "SUB2"]) - self.assertEqual(filenames, ["tmp1"]) - seen_testfn = True - elif path == self.sub1_path: - self.assertFalse(seen_testfn) - self.assertFalse(seen_sub1) - self.assertTrue(seen_sub11) - self.assertEqual(dirnames, ["SUB11"]) - self.assertEqual(filenames, ["tmp2"]) - seen_sub1 = True - elif path == self.sub11_path: - self.assertFalse(seen_sub1) - self.assertFalse(seen_sub11) - self.assertEqual(dirnames, []) - self.assertEqual(filenames, []) - seen_sub11 = True - elif path == self.sub2_path: - self.assertFalse(seen_testfn) - self.assertFalse(seen_sub2) - self.assertEqual(sorted(dirnames), sorted(self.sub2_tree[1])) - self.assertEqual(sorted(filenames), sorted(self.sub2_tree[2])) - seen_sub2 = True - else: - raise AssertionError(f"Unexpected path: {path}") - self.assertTrue(seen_testfn) - - def test_walk_follow_symlinks(self): - if not self.can_symlink: - self.skipTest("symlinks required") - self.setUpWalk() - walk_it = self.walk_path.walk(follow_symlinks=True) - for root, dirs, files in walk_it: - if root == self.link_path: - self.assertEqual(dirs, []) - self.assertEqual(files, ["tmp4"]) - break - else: - self.fail("Didn't follow symlink with follow_symlinks=True") - - def test_walk_symlink_location(self): - if not self.can_symlink: - self.skipTest("symlinks required") - self.setUpWalk() - # Tests whether symlinks end up in filenames or dirnames depending - # on the `follow_symlinks` argument. - walk_it = self.walk_path.walk(follow_symlinks=False) - for root, dirs, files in walk_it: - if root == self.sub2_path: - self.assertIn("link", files) - break - else: - self.fail("symlink not found") - - walk_it = self.walk_path.walk(follow_symlinks=True) - for root, dirs, files in walk_it: - if root == self.sub2_path: - self.assertIn("link", dirs) - break - else: - self.fail("symlink not found") - - def test_walk_above_recursion_limit(self): - recursion_limit = 40 - # directory_depth > recursion_limit - directory_depth = recursion_limit + 10 - base = self.cls(BASE, 'deep') - path = base.joinpath(*(['d'] * directory_depth)) - path.mkdir(parents=True) - - with set_recursion_limit(recursion_limit): - list(base.walk()) - list(base.walk(top_down=False)) - -class DummyPathWithSymlinks(DummyPath): - def readlink(self): - path = str(self.parent.resolve() / self.name) - if path in self._symlinks: - return self.with_segments(self._symlinks[path]) - elif path in self._files or path in self._directories: - raise OSError(errno.EINVAL, "Not a symlink", path) - else: - raise FileNotFoundError(errno.ENOENT, "File not found", path) - - def symlink_to(self, target, target_is_directory=False): - self._directories[str(self.parent)].add(self.name) - self._symlinks[str(self)] = str(target) - - -class DummyPathWithSymlinksTest(DummyPathTest): - cls = DummyPathWithSymlinks - can_symlink = True - - -# -# Tests for the concrete classes. 
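# A compact sketch of the walk() pruning pattern used in test_walk_prune
# above: with top_down=True (the default), removing names from the dirnames
# list in place stops walk() from descending into them, just like os.walk().
# Requires Python 3.12+ for pathlib.Path.walk(); the tree is illustrative.
import pathlib
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    root = pathlib.Path(tmp)
    (root / "SUB1" / "SUB11").mkdir(parents=True)
    (root / "SUB2").mkdir()
    (root / "SUB2" / "tmp3").write_text("hello\n")

    visited = []
    for dirpath, dirnames, filenames in root.walk():
        visited.append(dirpath.name)
        if "SUB1" in dirnames:
            dirnames.remove("SUB1")   # prune: SUB1 and SUB11 are never visited

    assert "SUB1" not in visited and "SUB11" not in visited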
-# - -class PathTest(DummyPathTest, PurePathTest): - """Tests for the FS-accessing functionalities of the Path classes.""" - cls = pathlib.Path - can_symlink = os_helper.can_symlink() - - def setUp(self): - super().setUp() - os.chmod(join('dirE'), 0) - - def tearDown(self): - os.chmod(join('dirE'), 0o777) - os_helper.rmtree(BASE) - - def tempdir(self): - d = os_helper._longpath(tempfile.mkdtemp(suffix='-dirD', - dir=os.getcwd())) - self.addCleanup(os_helper.rmtree, d) - return d - - def test_concrete_class(self): - if self.cls is pathlib.Path: - expected = pathlib.WindowsPath if os.name == 'nt' else pathlib.PosixPath - else: - expected = self.cls - p = self.cls('a') - self.assertIs(type(p), expected) - - def test_unsupported_pathmod(self): - if self.cls.pathmod is os.path: - self.skipTest("path flavour is supported") - else: - self.assertRaises(pathlib.UnsupportedOperation, self.cls) - - def _test_cwd(self, p): - q = self.cls(os.getcwd()) - self.assertEqual(p, q) - self.assertEqualNormCase(str(p), str(q)) - self.assertIs(type(p), type(q)) - self.assertTrue(p.is_absolute()) - - def test_cwd(self): - p = self.cls.cwd() - self._test_cwd(p) - - def test_absolute_common(self): - P = self.cls - - with mock.patch("os.getcwd") as getcwd: - getcwd.return_value = BASE - - # Simple relative paths. - self.assertEqual(str(P().absolute()), BASE) - self.assertEqual(str(P('.').absolute()), BASE) - self.assertEqual(str(P('a').absolute()), os.path.join(BASE, 'a')) - self.assertEqual(str(P('a', 'b', 'c').absolute()), os.path.join(BASE, 'a', 'b', 'c')) - - # Symlinks should not be resolved. - self.assertEqual(str(P('linkB', 'fileB').absolute()), os.path.join(BASE, 'linkB', 'fileB')) - self.assertEqual(str(P('brokenLink').absolute()), os.path.join(BASE, 'brokenLink')) - self.assertEqual(str(P('brokenLinkLoop').absolute()), os.path.join(BASE, 'brokenLinkLoop')) - - # '..' entries should be preserved and not normalised. - self.assertEqual(str(P('..').absolute()), os.path.join(BASE, '..')) - self.assertEqual(str(P('a', '..').absolute()), os.path.join(BASE, 'a', '..')) - self.assertEqual(str(P('..', 'b').absolute()), os.path.join(BASE, '..', 'b')) - - def _test_home(self, p): - q = self.cls(os.path.expanduser('~')) - self.assertEqual(p, q) - self.assertEqualNormCase(str(p), str(q)) - self.assertIs(type(p), type(q)) - self.assertTrue(p.is_absolute()) - - @unittest.skipIf( - pwd is None, reason="Test requires pwd module to get homedir." 
- ) - def test_home(self): - with os_helper.EnvironmentVarGuard() as env: - self._test_home(self.cls.home()) - - env.clear() - env['USERPROFILE'] = os.path.join(BASE, 'userprofile') - self._test_home(self.cls.home()) - - # bpo-38883: ignore `HOME` when set on windows - env['HOME'] = os.path.join(BASE, 'home') - self._test_home(self.cls.home()) - - @unittest.skipIf(is_wasi, "WASI has no user accounts.") - def test_expanduser_common(self): - P = self.cls - p = P('~') - self.assertEqual(p.expanduser(), P(os.path.expanduser('~'))) - p = P('foo') - self.assertEqual(p.expanduser(), p) - p = P('/~') - self.assertEqual(p.expanduser(), p) - p = P('../~') - self.assertEqual(p.expanduser(), p) - p = P(P('').absolute().anchor) / '~' - self.assertEqual(p.expanduser(), p) - p = P('~/a:b') - self.assertEqual(p.expanduser(), P(os.path.expanduser('~'), './a:b')) - - def test_with_segments(self): - class P(self.cls): - def __init__(self, *pathsegments, session_id): - super().__init__(*pathsegments) - self.session_id = session_id - - def with_segments(self, *pathsegments): - return type(self)(*pathsegments, session_id=self.session_id) - p = P(BASE, session_id=42) - self.assertEqual(42, p.absolute().session_id) - self.assertEqual(42, p.resolve().session_id) - if not is_wasi: # WASI has no user accounts. - self.assertEqual(42, p.with_segments('~').expanduser().session_id) - self.assertEqual(42, (p / 'fileA').rename(p / 'fileB').session_id) - self.assertEqual(42, (p / 'fileB').replace(p / 'fileA').session_id) - if self.can_symlink: - self.assertEqual(42, (p / 'linkA').readlink().session_id) - for path in p.iterdir(): - self.assertEqual(42, path.session_id) - for path in p.glob('*'): - self.assertEqual(42, path.session_id) - for path in p.rglob('*'): - self.assertEqual(42, path.session_id) - for dirpath, dirnames, filenames in p.walk(): - self.assertEqual(42, dirpath.session_id) - - def test_open_unbuffered(self): - p = self.cls(BASE) - with (p / 'fileA').open('rb', buffering=0) as f: - self.assertIsInstance(f, io.RawIOBase) - self.assertEqual(f.read().strip(), b"this is file A") - - def test_resolve_nonexist_relative_issue38671(self): - p = self.cls('non', 'exist') - - old_cwd = os.getcwd() - os.chdir(BASE) - try: - self.assertEqual(p.resolve(), self.cls(BASE, p)) - finally: - os.chdir(old_cwd) - - @os_helper.skip_unless_working_chmod - def test_chmod(self): - p = self.cls(BASE) / 'fileA' - mode = p.stat().st_mode - # Clear writable bit. - new_mode = mode & ~0o222 - p.chmod(new_mode) - self.assertEqual(p.stat().st_mode, new_mode) - # Set writable bit. - new_mode = mode | 0o222 - p.chmod(new_mode) - self.assertEqual(p.stat().st_mode, new_mode) - - # On Windows, os.chmod does not follow symlinks (issue #15411) - @only_posix - @os_helper.skip_unless_working_chmod - def test_chmod_follow_symlinks_true(self): - p = self.cls(BASE) / 'linkA' - q = p.resolve() - mode = q.stat().st_mode - # Clear writable bit. - new_mode = mode & ~0o222 - p.chmod(new_mode, follow_symlinks=True) - self.assertEqual(q.stat().st_mode, new_mode) - # Set writable bit - new_mode = mode | 0o222 - p.chmod(new_mode, follow_symlinks=True) - self.assertEqual(q.stat().st_mode, new_mode) - - # XXX also need a test for lchmod. 
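# A small standalone sketch of the chmod() round trip tested above: clear
# the write bits (0o222), verify, then restore the original mode. chmod()
# also accepts follow_symlinks where the platform supports it, and lchmod()
# is the follow_symlinks=False shorthand mentioned in the XXX note above.
import pathlib
import stat
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    p = pathlib.Path(tmp) / "fileA"
    p.write_text("this is file A\n")

    mode = stat.S_IMODE(p.stat().st_mode)
    p.chmod(mode & ~0o222)                    # drop write permission
    assert (stat.S_IMODE(p.stat().st_mode) & 0o222) == 0
    p.chmod(mode)                             # restore the original mode
    assert stat.S_IMODE(p.stat().st_mode) == mode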
- - def _get_pw_name_or_skip_test(self, uid): - try: - return pwd.getpwuid(uid).pw_name - except KeyError: - self.skipTest( - "user %d doesn't have an entry in the system database" % uid) - - @unittest.skipUnless(pwd, "the pwd module is needed for this test") - def test_owner(self): - p = self.cls(BASE) / 'fileA' - expected_uid = p.stat().st_uid - expected_name = self._get_pw_name_or_skip_test(expected_uid) - - self.assertEqual(expected_name, p.owner()) - - @unittest.skipUnless(pwd, "the pwd module is needed for this test") - @unittest.skipUnless(root_in_posix, "test needs root privilege") - def test_owner_no_follow_symlinks(self): - all_users = [u.pw_uid for u in pwd.getpwall()] - if len(all_users) < 2: - self.skipTest("test needs more than one user") - - target = self.cls(BASE) / 'fileA' - link = self.cls(BASE) / 'linkA' - - uid_1, uid_2 = all_users[:2] - os.chown(target, uid_1, -1) - os.chown(link, uid_2, -1, follow_symlinks=False) - - expected_uid = link.stat(follow_symlinks=False).st_uid - expected_name = self._get_pw_name_or_skip_test(expected_uid) - - self.assertEqual(expected_uid, uid_2) - self.assertEqual(expected_name, link.owner(follow_symlinks=False)) - - def _get_gr_name_or_skip_test(self, gid): - try: - return grp.getgrgid(gid).gr_name - except KeyError: - self.skipTest( - "group %d doesn't have an entry in the system database" % gid) - - @unittest.skipUnless(grp, "the grp module is needed for this test") - def test_group(self): - p = self.cls(BASE) / 'fileA' - expected_gid = p.stat().st_gid - expected_name = self._get_gr_name_or_skip_test(expected_gid) - - self.assertEqual(expected_name, p.group()) - - @unittest.skipUnless(grp, "the grp module is needed for this test") - @unittest.skipUnless(root_in_posix, "test needs root privilege") - def test_group_no_follow_symlinks(self): - all_groups = [g.gr_gid for g in grp.getgrall()] - if len(all_groups) < 2: - self.skipTest("test needs more than one group") - - target = self.cls(BASE) / 'fileA' - link = self.cls(BASE) / 'linkA' - - gid_1, gid_2 = all_groups[:2] - os.chown(target, -1, gid_1) - os.chown(link, -1, gid_2, follow_symlinks=False) - - expected_gid = link.stat(follow_symlinks=False).st_gid - expected_name = self._get_pw_name_or_skip_test(expected_gid) - - self.assertEqual(expected_gid, gid_2) - self.assertEqual(expected_name, link.group(follow_symlinks=False)) - - def test_unlink(self): - p = self.cls(BASE) / 'fileA' - p.unlink() - self.assertFileNotFound(p.stat) - self.assertFileNotFound(p.unlink) - - def test_unlink_missing_ok(self): - p = self.cls(BASE) / 'fileAAA' - self.assertFileNotFound(p.unlink) - p.unlink(missing_ok=True) - - def test_rmdir(self): - p = self.cls(BASE) / 'dirA' - for q in p.iterdir(): - q.unlink() - p.rmdir() - self.assertFileNotFound(p.stat) - self.assertFileNotFound(p.unlink) - - @unittest.skipUnless(hasattr(os, "link"), "os.link() is not present") - def test_hardlink_to(self): - P = self.cls(BASE) - target = P / 'fileA' - size = target.stat().st_size - # linking to another path. - link = P / 'dirA' / 'fileAA' - link.hardlink_to(target) - self.assertEqual(link.stat().st_size, size) - self.assertTrue(os.path.samefile(target, link)) - self.assertTrue(target.exists()) - # Linking to a str of a relative path. 
- link2 = P / 'dirA' / 'fileAAA' - target2 = rel_join('fileA') - link2.hardlink_to(target2) - self.assertEqual(os.stat(target2).st_size, size) - self.assertTrue(link2.exists()) - - @unittest.skipIf(hasattr(os, "link"), "os.link() is present") - def test_hardlink_to_unsupported(self): - P = self.cls(BASE) - p = P / 'fileA' - # linking to another path. - q = P / 'dirA' / 'fileAA' - with self.assertRaises(pathlib.UnsupportedOperation): - q.hardlink_to(p) - - def test_rename(self): - P = self.cls(BASE) - p = P / 'fileA' - size = p.stat().st_size - # Renaming to another path. - q = P / 'dirA' / 'fileAA' - renamed_p = p.rename(q) - self.assertEqual(renamed_p, q) - self.assertEqual(q.stat().st_size, size) - self.assertFileNotFound(p.stat) - # Renaming to a str of a relative path. - r = rel_join('fileAAA') - renamed_q = q.rename(r) - self.assertEqual(renamed_q, self.cls(r)) - self.assertEqual(os.stat(r).st_size, size) - self.assertFileNotFound(q.stat) - - def test_replace(self): - P = self.cls(BASE) - p = P / 'fileA' - size = p.stat().st_size - # Replacing a non-existing path. - q = P / 'dirA' / 'fileAA' - replaced_p = p.replace(q) - self.assertEqual(replaced_p, q) - self.assertEqual(q.stat().st_size, size) - self.assertFileNotFound(p.stat) - # Replacing another (existing) path. - r = rel_join('dirB', 'fileB') - replaced_q = q.replace(r) - self.assertEqual(replaced_q, self.cls(r)) - self.assertEqual(os.stat(r).st_size, size) - self.assertFileNotFound(q.stat) - - def test_touch_common(self): - P = self.cls(BASE) - p = P / 'newfileA' - self.assertFalse(p.exists()) - p.touch() - self.assertTrue(p.exists()) - st = p.stat() - old_mtime = st.st_mtime - old_mtime_ns = st.st_mtime_ns - # Rewind the mtime sufficiently far in the past to work around - # filesystem-specific timestamp granularity. - os.utime(str(p), (old_mtime - 10, old_mtime - 10)) - # The file mtime should be refreshed by calling touch() again. - p.touch() - st = p.stat() - self.assertGreaterEqual(st.st_mtime_ns, old_mtime_ns) - self.assertGreaterEqual(st.st_mtime, old_mtime) - # Now with exist_ok=False. - p = P / 'newfileB' - self.assertFalse(p.exists()) - p.touch(mode=0o700, exist_ok=False) - self.assertTrue(p.exists()) - self.assertRaises(OSError, p.touch, exist_ok=False) - - def test_touch_nochange(self): - P = self.cls(BASE) - p = P / 'fileA' - p.touch() - with p.open('rb') as f: - self.assertEqual(f.read().strip(), b"this is file A") - - def test_mkdir(self): - P = self.cls(BASE) - p = P / 'newdirA' - self.assertFalse(p.exists()) - p.mkdir() - self.assertTrue(p.exists()) - self.assertTrue(p.is_dir()) - with self.assertRaises(OSError) as cm: - p.mkdir() - self.assertEqual(cm.exception.errno, errno.EEXIST) - - def test_mkdir_parents(self): - # Creating a chain of directories. - p = self.cls(BASE, 'newdirB', 'newdirC') - self.assertFalse(p.exists()) - with self.assertRaises(OSError) as cm: - p.mkdir() - self.assertEqual(cm.exception.errno, errno.ENOENT) - p.mkdir(parents=True) - self.assertTrue(p.exists()) - self.assertTrue(p.is_dir()) - with self.assertRaises(OSError) as cm: - p.mkdir(parents=True) - self.assertEqual(cm.exception.errno, errno.EEXIST) - # Test `mode` arg. - mode = stat.S_IMODE(p.stat().st_mode) # Default mode. - p = self.cls(BASE, 'newdirD', 'newdirE') - p.mkdir(0o555, parents=True) - self.assertTrue(p.exists()) - self.assertTrue(p.is_dir()) - if os.name != 'nt': - # The directory's permissions follow the mode argument. 
- self.assertEqual(stat.S_IMODE(p.stat().st_mode), 0o7555 & mode) - # The parent's permissions follow the default process settings. - self.assertEqual(stat.S_IMODE(p.parent.stat().st_mode), mode) - - def test_mkdir_exist_ok(self): - p = self.cls(BASE, 'dirB') - st_ctime_first = p.stat().st_ctime - self.assertTrue(p.exists()) - self.assertTrue(p.is_dir()) - with self.assertRaises(FileExistsError) as cm: - p.mkdir() - self.assertEqual(cm.exception.errno, errno.EEXIST) - p.mkdir(exist_ok=True) - self.assertTrue(p.exists()) - self.assertEqual(p.stat().st_ctime, st_ctime_first) - - def test_mkdir_exist_ok_with_parent(self): - p = self.cls(BASE, 'dirC') - self.assertTrue(p.exists()) - with self.assertRaises(FileExistsError) as cm: - p.mkdir() - self.assertEqual(cm.exception.errno, errno.EEXIST) - p = p / 'newdirC' - p.mkdir(parents=True) - st_ctime_first = p.stat().st_ctime - self.assertTrue(p.exists()) - with self.assertRaises(FileExistsError) as cm: - p.mkdir(parents=True) - self.assertEqual(cm.exception.errno, errno.EEXIST) - p.mkdir(parents=True, exist_ok=True) - self.assertTrue(p.exists()) - self.assertEqual(p.stat().st_ctime, st_ctime_first) - - @unittest.skipIf(is_emscripten, "FS root cannot be modified on Emscripten.") - def test_mkdir_exist_ok_root(self): - # Issue #25803: A drive root could raise PermissionError on Windows. - self.cls('/').resolve().mkdir(exist_ok=True) - self.cls('/').resolve().mkdir(parents=True, exist_ok=True) - - @only_nt # XXX: not sure how to test this on POSIX. - def test_mkdir_with_unknown_drive(self): - for d in 'ZYXWVUTSRQPONMLKJIHGFEDCBA': - p = self.cls(d + ':\\') - if not p.is_dir(): - break - else: - self.skipTest("cannot find a drive that doesn't exist") - with self.assertRaises(OSError): - (p / 'child' / 'path').mkdir(parents=True) - - def test_mkdir_with_child_file(self): - p = self.cls(BASE, 'dirB', 'fileB') - self.assertTrue(p.exists()) - # An exception is raised when the last path component is an existing - # regular file, regardless of whether exist_ok is true or not. - with self.assertRaises(FileExistsError) as cm: - p.mkdir(parents=True) - self.assertEqual(cm.exception.errno, errno.EEXIST) - with self.assertRaises(FileExistsError) as cm: - p.mkdir(parents=True, exist_ok=True) - self.assertEqual(cm.exception.errno, errno.EEXIST) - - def test_mkdir_no_parents_file(self): - p = self.cls(BASE, 'fileA') - self.assertTrue(p.exists()) - # An exception is raised when the last path component is an existing - # regular file, regardless of whether exist_ok is true or not. - with self.assertRaises(FileExistsError) as cm: - p.mkdir() - self.assertEqual(cm.exception.errno, errno.EEXIST) - with self.assertRaises(FileExistsError) as cm: - p.mkdir(exist_ok=True) - self.assertEqual(cm.exception.errno, errno.EEXIST) - - def test_mkdir_concurrent_parent_creation(self): - for pattern_num in range(32): - p = self.cls(BASE, 'dirCPC%d' % pattern_num) - self.assertFalse(p.exists()) - - real_mkdir = os.mkdir - def my_mkdir(path, mode=0o777): - path = str(path) - # Emulate another process that would create the directory - # just before we try to create it ourselves. We do it - # in all possible pattern combinations, assuming that this - # function is called at most 5 times (dirCPC/dir1/dir2, - # dirCPC/dir1, dirCPC, dirCPC/dir1, dirCPC/dir1/dir2). - if pattern.pop(): - real_mkdir(path, mode) # From another process. - concurrently_created.add(path) - real_mkdir(path, mode) # Our real call. 
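# A standalone sketch of the mkdir() semantics this test races against:
# with parents=True, parents that already exist are always tolerated, while
# an already-existing leaf raises FileExistsError unless exist_ok=True. The
# "other process" is emulated here simply by pre-creating dir1.
import pathlib
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    leaf = pathlib.Path(tmp) / "dir1" / "dir2"

    leaf.parent.mkdir()                       # another process creates dir1 first
    leaf.mkdir(parents=True)                  # fine: the existing parent is reused

    try:
        leaf.mkdir(parents=True)              # the leaf itself now exists
    except FileExistsError:
        pass
    else:
        raise AssertionError("expected FileExistsError for the existing leaf")

    leaf.mkdir(parents=True, exist_ok=True)   # exist_ok=True makes it idempotent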
- - pattern = [bool(pattern_num & (1 << n)) for n in range(5)] - concurrently_created = set() - p12 = p / 'dir1' / 'dir2' - try: - with mock.patch("os.mkdir", my_mkdir): - p12.mkdir(parents=True, exist_ok=False) - except FileExistsError: - self.assertIn(str(p12), concurrently_created) - else: - self.assertNotIn(str(p12), concurrently_created) - self.assertTrue(p.exists()) - - def test_symlink_to(self): - if not self.can_symlink: - self.skipTest("symlinks required") - P = self.cls(BASE) - target = P / 'fileA' - # Symlinking a path target. - link = P / 'dirA' / 'linkAA' - link.symlink_to(target) - self.assertEqual(link.stat(), target.stat()) - self.assertNotEqual(link.lstat(), target.stat()) - # Symlinking a str target. - link = P / 'dirA' / 'linkAAA' - link.symlink_to(str(target)) - self.assertEqual(link.stat(), target.stat()) - self.assertNotEqual(link.lstat(), target.stat()) - self.assertFalse(link.is_dir()) - # Symlinking to a directory. - target = P / 'dirB' - link = P / 'dirA' / 'linkAAAA' - link.symlink_to(target, target_is_directory=True) - self.assertEqual(link.stat(), target.stat()) - self.assertNotEqual(link.lstat(), target.stat()) - self.assertTrue(link.is_dir()) - self.assertTrue(list(link.iterdir())) - - @unittest.skipIf(hasattr(os, "symlink"), "os.symlink() is present") - def test_symlink_to_unsupported(self): - P = self.cls(BASE) - p = P / 'fileA' - # linking to another path. - q = P / 'dirA' / 'fileAA' - with self.assertRaises(pathlib.UnsupportedOperation): - q.symlink_to(p) - - def test_is_junction(self): - P = self.cls(BASE) - - with mock.patch.object(P.pathmod, 'isjunction'): - self.assertEqual(P.is_junction(), P.pathmod.isjunction.return_value) - P.pathmod.isjunction.assert_called_once_with(P) - - @unittest.skipUnless(hasattr(os, "mkfifo"), "os.mkfifo() required") - @unittest.skipIf(sys.platform == "vxworks", - "fifo requires special path on VxWorks") - def test_is_fifo_true(self): - P = self.cls(BASE, 'myfifo') - try: - os.mkfifo(str(P)) - except PermissionError as e: - self.skipTest('os.mkfifo(): %s' % e) - self.assertTrue(P.is_fifo()) - self.assertFalse(P.is_socket()) - self.assertFalse(P.is_file()) - self.assertIs(self.cls(BASE, 'myfifo\udfff').is_fifo(), False) - self.assertIs(self.cls(BASE, 'myfifo\x00').is_fifo(), False) - - @unittest.skipUnless(hasattr(socket, "AF_UNIX"), "Unix sockets required") - @unittest.skipIf( - is_emscripten, "Unix sockets are not implemented on Emscripten." - ) - @unittest.skipIf( - is_wasi, "Cannot create socket on WASI." - ) - def test_is_socket_true(self): - P = self.cls(BASE, 'mysock') - sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - self.addCleanup(sock.close) - try: - sock.bind(str(P)) - except OSError as e: - if (isinstance(e, PermissionError) or - "AF_UNIX path too long" in str(e)): - self.skipTest("cannot bind Unix socket: " + str(e)) - self.assertTrue(P.is_socket()) - self.assertFalse(P.is_fifo()) - self.assertFalse(P.is_file()) - self.assertIs(self.cls(BASE, 'mysock\udfff').is_socket(), False) - self.assertIs(self.cls(BASE, 'mysock\x00').is_socket(), False) - - def test_is_char_device_true(self): - # Under Unix, /dev/null should generally be a char device. 
- P = self.cls('/dev/null') - if not P.exists(): - self.skipTest("/dev/null required") - self.assertTrue(P.is_char_device()) - self.assertFalse(P.is_block_device()) - self.assertFalse(P.is_file()) - self.assertIs(self.cls('/dev/null\udfff').is_char_device(), False) - self.assertIs(self.cls('/dev/null\x00').is_char_device(), False) - - def test_is_mount_root(self): - if os.name == 'nt': - R = self.cls('c:\\') - else: - R = self.cls('/') - self.assertTrue(R.is_mount()) - self.assertFalse((R / '\udfff').is_mount()) - - def test_passing_kwargs_deprecated(self): - with self.assertWarns(DeprecationWarning): - self.cls(foo="bar") - - def setUpWalk(self): - super().setUpWalk() - sub21_path= self.sub2_path / "SUB21" - tmp5_path = sub21_path / "tmp3" - broken_link3_path = self.sub2_path / "broken_link3" - - os.makedirs(sub21_path) - tmp5_path.write_text("I am tmp5, blame test_pathlib.") - if self.can_symlink: - os.symlink(tmp5_path, broken_link3_path) - self.sub2_tree[2].append('broken_link3') - self.sub2_tree[2].sort() - if not is_emscripten: - # Emscripten fails with inaccessible directories. - os.chmod(sub21_path, 0) - try: - os.listdir(sub21_path) - except PermissionError: - self.sub2_tree[1].append('SUB21') - else: - os.chmod(sub21_path, stat.S_IRWXU) - os.unlink(tmp5_path) - os.rmdir(sub21_path) - - def test_walk_bad_dir(self): - self.setUpWalk() - errors = [] - walk_it = self.walk_path.walk(on_error=errors.append) - root, dirs, files = next(walk_it) - self.assertEqual(errors, []) - dir1 = 'SUB1' - path1 = root / dir1 - path1new = (root / dir1).with_suffix(".new") - path1.rename(path1new) - try: - roots = [r for r, _, _ in walk_it] - self.assertTrue(errors) - self.assertNotIn(path1, roots) - self.assertNotIn(path1new, roots) - for dir2 in dirs: - if dir2 != dir1: - self.assertIn(root / dir2, roots) - finally: - path1new.rename(path1) - - def test_walk_many_open_files(self): - depth = 30 - base = self.cls(BASE, 'deep') - path = self.cls(base, *(['d']*depth)) - path.mkdir(parents=True) - - iters = [base.walk(top_down=False) for _ in range(100)] - for i in range(depth + 1): - expected = (path, ['d'] if i else [], []) - for it in iters: - self.assertEqual(next(it), expected) - path = path.parent - - iters = [base.walk(top_down=True) for _ in range(100)] - path = base - for i in range(depth + 1): - expected = (path, ['d'] if i < depth else [], []) - for it in iters: - self.assertEqual(next(it), expected) - path = path / 'd' - - -@only_posix -class PosixPathTest(PathTest, PurePosixPathTest): - cls = pathlib.PosixPath - - def test_absolute(self): - P = self.cls - self.assertEqual(str(P('/').absolute()), '/') - self.assertEqual(str(P('/a').absolute()), '/a') - self.assertEqual(str(P('/a/b').absolute()), '/a/b') - - # '//'-prefixed absolute path (supported by POSIX). - self.assertEqual(str(P('//').absolute()), '//') - self.assertEqual(str(P('//a').absolute()), '//a') - self.assertEqual(str(P('//a/b').absolute()), '//a/b') - - @unittest.skipIf( - is_emscripten or is_wasi, - "umask is not implemented on Emscripten/WASI." 
- ) - def test_open_mode(self): - old_mask = os.umask(0) - self.addCleanup(os.umask, old_mask) - p = self.cls(BASE) - with (p / 'new_file').open('wb'): - pass - st = os.stat(join('new_file')) - self.assertEqual(stat.S_IMODE(st.st_mode), 0o666) - os.umask(0o022) - with (p / 'other_new_file').open('wb'): - pass - st = os.stat(join('other_new_file')) - self.assertEqual(stat.S_IMODE(st.st_mode), 0o644) - - def test_resolve_root(self): - current_directory = os.getcwd() - try: - os.chdir('/') - p = self.cls('spam') - self.assertEqual(str(p.resolve()), '/spam') - finally: - os.chdir(current_directory) - - @unittest.skipIf( - is_emscripten or is_wasi, - "umask is not implemented on Emscripten/WASI." - ) - def test_touch_mode(self): - old_mask = os.umask(0) - self.addCleanup(os.umask, old_mask) - p = self.cls(BASE) - (p / 'new_file').touch() - st = os.stat(join('new_file')) - self.assertEqual(stat.S_IMODE(st.st_mode), 0o666) - os.umask(0o022) - (p / 'other_new_file').touch() - st = os.stat(join('other_new_file')) - self.assertEqual(stat.S_IMODE(st.st_mode), 0o644) - (p / 'masked_new_file').touch(mode=0o750) - st = os.stat(join('masked_new_file')) - self.assertEqual(stat.S_IMODE(st.st_mode), 0o750) - - def test_glob(self): - P = self.cls - p = P(BASE) - given = set(p.glob("FILEa")) - expect = set() if not os_helper.fs_is_case_insensitive(BASE) else given - self.assertEqual(given, expect) - self.assertEqual(set(p.glob("FILEa*")), set()) - - def test_rglob(self): - P = self.cls - p = P(BASE, "dirC") - given = set(p.rglob("FILEd")) - expect = set() if not os_helper.fs_is_case_insensitive(BASE) else given - self.assertEqual(given, expect) - self.assertEqual(set(p.rglob("FILEd*")), set()) - - @unittest.skipUnless(hasattr(pwd, 'getpwall'), - 'pwd module does not expose getpwall()') - @unittest.skipIf(sys.platform == "vxworks", - "no home directory on VxWorks") - def test_expanduser(self): - P = self.cls - import_helper.import_module('pwd') - import pwd - pwdent = pwd.getpwuid(os.getuid()) - username = pwdent.pw_name - userhome = pwdent.pw_dir.rstrip('/') or '/' - # Find arbitrary different user (if exists). - for pwdent in pwd.getpwall(): - othername = pwdent.pw_name - otherhome = pwdent.pw_dir.rstrip('/') - if othername != username and otherhome: - break - else: - othername = username - otherhome = userhome - - fakename = 'fakeuser' - # This user can theoretically exist on a test runner. 
Create unique name: - try: - while pwd.getpwnam(fakename): - fakename += '1' - except KeyError: - pass # Non-existent name found - - p1 = P('~/Documents') - p2 = P(f'~{username}/Documents') - p3 = P(f'~{othername}/Documents') - p4 = P(f'../~{username}/Documents') - p5 = P(f'/~{username}/Documents') - p6 = P('') - p7 = P(f'~{fakename}/Documents') - - with os_helper.EnvironmentVarGuard() as env: - env.pop('HOME', None) - - self.assertEqual(p1.expanduser(), P(userhome) / 'Documents') - self.assertEqual(p2.expanduser(), P(userhome) / 'Documents') - self.assertEqual(p3.expanduser(), P(otherhome) / 'Documents') - self.assertEqual(p4.expanduser(), p4) - self.assertEqual(p5.expanduser(), p5) - self.assertEqual(p6.expanduser(), p6) - self.assertRaises(RuntimeError, p7.expanduser) - - env['HOME'] = '/tmp' - self.assertEqual(p1.expanduser(), P('/tmp/Documents')) - self.assertEqual(p2.expanduser(), P(userhome) / 'Documents') - self.assertEqual(p3.expanduser(), P(otherhome) / 'Documents') - self.assertEqual(p4.expanduser(), p4) - self.assertEqual(p5.expanduser(), p5) - self.assertEqual(p6.expanduser(), p6) - self.assertRaises(RuntimeError, p7.expanduser) - - @unittest.skipIf(sys.platform != "darwin", - "Bad file descriptor in /dev/fd affects only macOS") - def test_handling_bad_descriptor(self): - try: - file_descriptors = list(pathlib.Path('/dev/fd').rglob("*"))[3:] - if not file_descriptors: - self.skipTest("no file descriptors - issue was not reproduced") - # Checking all file descriptors because there is no guarantee - # which one will fail. - for f in file_descriptors: - f.exists() - f.is_dir() - f.is_file() - f.is_symlink() - f.is_block_device() - f.is_char_device() - f.is_fifo() - f.is_socket() - except OSError as e: - if e.errno == errno.EBADF: - self.fail("Bad file descriptor not handled.") - raise - - def test_from_uri(self): - P = self.cls - self.assertEqual(P.from_uri('file:/foo/bar'), P('/foo/bar')) - self.assertEqual(P.from_uri('file://foo/bar'), P('//foo/bar')) - self.assertEqual(P.from_uri('file:///foo/bar'), P('/foo/bar')) - self.assertEqual(P.from_uri('file:////foo/bar'), P('//foo/bar')) - self.assertEqual(P.from_uri('file://localhost/foo/bar'), P('/foo/bar')) - self.assertRaises(ValueError, P.from_uri, 'foo/bar') - self.assertRaises(ValueError, P.from_uri, '/foo/bar') - self.assertRaises(ValueError, P.from_uri, '//foo/bar') - self.assertRaises(ValueError, P.from_uri, 'file:foo/bar') - self.assertRaises(ValueError, P.from_uri, 'http://foo/bar') - - def test_from_uri_pathname2url(self): - P = self.cls - self.assertEqual(P.from_uri('file:' + pathname2url('/foo/bar')), P('/foo/bar')) - self.assertEqual(P.from_uri('file:' + pathname2url('//foo/bar')), P('//foo/bar')) - - -@only_nt -class WindowsPathTest(PathTest, PureWindowsPathTest): - cls = pathlib.WindowsPath - - def test_absolute(self): - P = self.cls - - # Simple absolute paths. - self.assertEqual(str(P('c:\\').absolute()), 'c:\\') - self.assertEqual(str(P('c:\\a').absolute()), 'c:\\a') - self.assertEqual(str(P('c:\\a\\b').absolute()), 'c:\\a\\b') - - # UNC absolute paths. - share = '\\\\server\\share\\' - self.assertEqual(str(P(share).absolute()), share) - self.assertEqual(str(P(share + 'a').absolute()), share + 'a') - self.assertEqual(str(P(share + 'a\\b').absolute()), share + 'a\\b') - - # UNC relative paths. 
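A quick aside on the PosixPath.from_uri() assertions above: they reduce to a small set of accepted and rejected URI forms. A minimal sketch, assuming Python 3.13 (where Path.from_uri() exists) on a POSIX system; the paths are illustrative only.

    from pathlib import Path

    # Accepted 'file:' URIs (mirroring the assertions above):
    Path.from_uri('file:///foo/bar')           # PosixPath('/foo/bar')
    Path.from_uri('file://localhost/foo/bar')  # PosixPath('/foo/bar'), 'localhost' authority dropped
    Path.from_uri('file:////foo/bar')          # PosixPath('//foo/bar'), double-slash root preserved

    # Anything that is not an absolute file URI raises ValueError:
    for bad in ('foo/bar', '/foo/bar', 'file:foo/bar', 'http://foo/bar'):
        try:
            Path.from_uri(bad)
        except ValueError:
            pass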
- with mock.patch("os.getcwd") as getcwd: - getcwd.return_value = share - - self.assertEqual(str(P().absolute()), share) - self.assertEqual(str(P('.').absolute()), share) - self.assertEqual(str(P('a').absolute()), os.path.join(share, 'a')) - self.assertEqual(str(P('a', 'b', 'c').absolute()), - os.path.join(share, 'a', 'b', 'c')) - - drive = os.path.splitdrive(BASE)[0] - with os_helper.change_cwd(BASE): - # Relative path with root - self.assertEqual(str(P('\\').absolute()), drive + '\\') - self.assertEqual(str(P('\\foo').absolute()), drive + '\\foo') - - # Relative path on current drive - self.assertEqual(str(P(drive).absolute()), BASE) - self.assertEqual(str(P(drive + 'foo').absolute()), os.path.join(BASE, 'foo')) - - with os_helper.subst_drive(BASE) as other_drive: - # Set the working directory on the substitute drive - saved_cwd = os.getcwd() - other_cwd = f'{other_drive}\\dirA' - os.chdir(other_cwd) - os.chdir(saved_cwd) - - # Relative path on another drive - self.assertEqual(str(P(other_drive).absolute()), other_cwd) - self.assertEqual(str(P(other_drive + 'foo').absolute()), other_cwd + '\\foo') - - def test_glob(self): - P = self.cls - p = P(BASE) - self.assertEqual(set(p.glob("FILEa")), { P(BASE, "fileA") }) - self.assertEqual(set(p.glob("*a\\")), { P(BASE, "dirA/") }) - self.assertEqual(set(p.glob("F*a")), { P(BASE, "fileA") }) - self.assertEqual(set(map(str, p.glob("FILEa"))), {f"{p}\\fileA"}) - self.assertEqual(set(map(str, p.glob("F*a"))), {f"{p}\\fileA"}) - - def test_rglob(self): - P = self.cls - p = P(BASE, "dirC") - self.assertEqual(set(p.rglob("FILEd")), { P(BASE, "dirC/dirD/fileD") }) - self.assertEqual(set(p.rglob("*\\")), { P(BASE, "dirC/dirD/") }) - self.assertEqual(set(map(str, p.rglob("FILEd"))), {f"{p}\\dirD\\fileD"}) - - def test_expanduser(self): - P = self.cls - with os_helper.EnvironmentVarGuard() as env: - env.pop('HOME', None) - env.pop('USERPROFILE', None) - env.pop('HOMEPATH', None) - env.pop('HOMEDRIVE', None) - env['USERNAME'] = 'alice' - - # test that the path returns unchanged - p1 = P('~/My Documents') - p2 = P('~alice/My Documents') - p3 = P('~bob/My Documents') - p4 = P('/~/My Documents') - p5 = P('d:~/My Documents') - p6 = P('') - self.assertRaises(RuntimeError, p1.expanduser) - self.assertRaises(RuntimeError, p2.expanduser) - self.assertRaises(RuntimeError, p3.expanduser) - self.assertEqual(p4.expanduser(), p4) - self.assertEqual(p5.expanduser(), p5) - self.assertEqual(p6.expanduser(), p6) - - def check(): - env.pop('USERNAME', None) - self.assertEqual(p1.expanduser(), - P('C:/Users/alice/My Documents')) - self.assertRaises(RuntimeError, p2.expanduser) - env['USERNAME'] = 'alice' - self.assertEqual(p2.expanduser(), - P('C:/Users/alice/My Documents')) - self.assertEqual(p3.expanduser(), - P('C:/Users/bob/My Documents')) - self.assertEqual(p4.expanduser(), p4) - self.assertEqual(p5.expanduser(), p5) - self.assertEqual(p6.expanduser(), p6) - - env['HOMEPATH'] = 'C:\\Users\\alice' - check() - - env['HOMEDRIVE'] = 'C:\\' - env['HOMEPATH'] = 'Users\\alice' - check() - - env.pop('HOMEDRIVE', None) - env.pop('HOMEPATH', None) - env['USERPROFILE'] = 'C:\\Users\\alice' - check() - - # bpo-38883: ignore `HOME` when set on windows - env['HOME'] = 'C:\\Users\\eve' - check() - - def test_from_uri(self): - P = self.cls - # DOS drive paths - self.assertEqual(P.from_uri('file:c:/path/to/file'), P('c:/path/to/file')) - self.assertEqual(P.from_uri('file:c|/path/to/file'), P('c:/path/to/file')) - self.assertEqual(P.from_uri('file:/c|/path/to/file'), P('c:/path/to/file')) 
- self.assertEqual(P.from_uri('file:///c|/path/to/file'), P('c:/path/to/file')) - # UNC paths - self.assertEqual(P.from_uri('file://server/path/to/file'), P('//server/path/to/file')) - self.assertEqual(P.from_uri('file:////server/path/to/file'), P('//server/path/to/file')) - self.assertEqual(P.from_uri('file://///server/path/to/file'), P('//server/path/to/file')) - # Localhost paths - self.assertEqual(P.from_uri('file://localhost/c:/path/to/file'), P('c:/path/to/file')) - self.assertEqual(P.from_uri('file://localhost/c|/path/to/file'), P('c:/path/to/file')) - # Invalid paths - self.assertRaises(ValueError, P.from_uri, 'foo/bar') - self.assertRaises(ValueError, P.from_uri, 'c:/foo/bar') - self.assertRaises(ValueError, P.from_uri, '//foo/bar') - self.assertRaises(ValueError, P.from_uri, 'file:foo/bar') - self.assertRaises(ValueError, P.from_uri, 'http://foo/bar') - - def test_from_uri_pathname2url(self): - P = self.cls - self.assertEqual(P.from_uri('file:' + pathname2url(r'c:\path\to\file')), P('c:/path/to/file')) - self.assertEqual(P.from_uri('file:' + pathname2url(r'\\server\path\to\file')), P('//server/path/to/file')) - - def test_owner(self): - P = self.cls - with self.assertRaises(pathlib.UnsupportedOperation): - P('c:/').owner() - - def test_group(self): - P = self.cls - with self.assertRaises(pathlib.UnsupportedOperation): - P('c:/').group() - - -class PathSubclassTest(PathTest): - class cls(pathlib.Path): - pass - - # repr() roundtripping is not supported in custom subclass. - test_repr_roundtrips = None - - -class CompatiblePathTest(unittest.TestCase): - """ - Test that a type can be made compatible with PurePath - derivatives by implementing division operator overloads. - """ - - class CompatPath: - """ - Minimum viable class to test PurePath compatibility. - Simply uses the division operator to join a given - string and the string value of another object with - a forward slash. 
- """ - def __init__(self, string): - self.string = string - - def __truediv__(self, other): - return type(self)(f"{self.string}/{other}") - - def __rtruediv__(self, other): - return type(self)(f"{other}/{self.string}") - - def test_truediv(self): - result = pathlib.PurePath("test") / self.CompatPath("right") - self.assertIsInstance(result, self.CompatPath) - self.assertEqual(result.string, "test/right") - - with self.assertRaises(TypeError): - # Verify improper operations still raise a TypeError - pathlib.PurePath("test") / 10 - - def test_rtruediv(self): - result = self.CompatPath("left") / pathlib.PurePath("test") - self.assertIsInstance(result, self.CompatPath) - self.assertEqual(result.string, "left/test") - - with self.assertRaises(TypeError): - # Verify improper operations still raise a TypeError - 10 / pathlib.PurePath("test") - - -if __name__ == "__main__": - unittest.main() diff --git a/Lib/test/test_pathlib/__init__.py b/Lib/test/test_pathlib/__init__.py new file mode 100644 index 00000000000000..4b16ecc31156a5 --- /dev/null +++ b/Lib/test/test_pathlib/__init__.py @@ -0,0 +1,5 @@ +import os +from test.support import load_package_tests + +def load_tests(*args): + return load_package_tests(os.path.dirname(__file__), *args) diff --git a/Lib/test/test_pathlib/test_pathlib.py b/Lib/test/test_pathlib/test_pathlib.py new file mode 100644 index 00000000000000..00cfdd37e568a6 --- /dev/null +++ b/Lib/test/test_pathlib/test_pathlib.py @@ -0,0 +1,1972 @@ +import io +import os +import sys +import errno +import pathlib +import pickle +import socket +import stat +import tempfile +import unittest +from unittest import mock +from urllib.request import pathname2url + +from test.support import import_helper +from test.support import is_emscripten, is_wasi +from test.support import os_helper +from test.support.os_helper import TESTFN, FakePath +from test.test_pathlib import test_pathlib_abc + +try: + import grp, pwd +except ImportError: + grp = pwd = None + + +only_nt = unittest.skipIf(os.name != 'nt', + 'test requires a Windows-compatible system') +only_posix = unittest.skipIf(os.name == 'nt', + 'test requires a POSIX-compatible system') + +root_in_posix = False +if hasattr(os, 'geteuid'): + root_in_posix = (os.geteuid() == 0) + +# +# Tests for the pure classes. 
+# + +class PurePathTest(test_pathlib_abc.DummyPurePathTest): + cls = pathlib.PurePath + + def test_constructor_nested(self): + P = self.cls + P(FakePath("a/b/c")) + self.assertEqual(P(P('a')), P('a')) + self.assertEqual(P(P('a'), 'b'), P('a/b')) + self.assertEqual(P(P('a'), P('b')), P('a/b')) + self.assertEqual(P(P('a'), P('b'), P('c')), P(FakePath("a/b/c"))) + self.assertEqual(P(P('./a:b')), P('./a:b')) + + def test_join_nested(self): + P = self.cls + p = P('a/b').joinpath(P('c')) + self.assertEqual(p, P('a/b/c')) + + def test_div_nested(self): + P = self.cls + p = P('a/b') / P('c') + self.assertEqual(p, P('a/b/c')) + + def test_pickling_common(self): + P = self.cls + p = P('/a/b') + for proto in range(0, pickle.HIGHEST_PROTOCOL + 1): + dumped = pickle.dumps(p, proto) + pp = pickle.loads(dumped) + self.assertIs(pp.__class__, p.__class__) + self.assertEqual(pp, p) + self.assertEqual(hash(pp), hash(p)) + self.assertEqual(str(pp), str(p)) + + def test_fspath_common(self): + P = self.cls + p = P('a/b') + self._check_str(p.__fspath__(), ('a/b',)) + self._check_str(os.fspath(p), ('a/b',)) + + def test_bytes(self): + P = self.cls + message = (r"argument should be a str or an os\.PathLike object " + r"where __fspath__ returns a str, not 'bytes'") + with self.assertRaisesRegex(TypeError, message): + P(b'a') + with self.assertRaisesRegex(TypeError, message): + P(b'a', 'b') + with self.assertRaisesRegex(TypeError, message): + P('a', b'b') + with self.assertRaises(TypeError): + P('a').joinpath(b'b') + with self.assertRaises(TypeError): + P('a') / b'b' + with self.assertRaises(TypeError): + b'a' / P('b') + with self.assertRaises(TypeError): + P('a').match(b'b') + with self.assertRaises(TypeError): + P('a').relative_to(b'b') + with self.assertRaises(TypeError): + P('a').with_name(b'b') + with self.assertRaises(TypeError): + P('a').with_stem(b'b') + with self.assertRaises(TypeError): + P('a').with_suffix(b'b') + + def test_as_bytes_common(self): + sep = os.fsencode(self.sep) + P = self.cls + self.assertEqual(bytes(P('a/b')), b'a' + sep + b'b') + + def test_ordering_common(self): + # Ordering is tuple-alike. + def assertLess(a, b): + self.assertLess(a, b) + self.assertGreater(b, a) + P = self.cls + a = P('a') + b = P('a/b') + c = P('abc') + d = P('b') + assertLess(a, b) + assertLess(a, c) + assertLess(a, d) + assertLess(b, c) + assertLess(c, d) + P = self.cls + a = P('/a') + b = P('/a/b') + c = P('/abc') + d = P('/b') + assertLess(a, b) + assertLess(a, c) + assertLess(a, d) + assertLess(b, c) + assertLess(c, d) + with self.assertRaises(TypeError): + P() < {} + + def test_as_uri_common(self): + P = self.cls + with self.assertRaises(ValueError): + P('a').as_uri() + with self.assertRaises(ValueError): + P().as_uri() + + def test_repr_roundtrips(self): + for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'): + with self.subTest(pathstr=pathstr): + p = self.cls(pathstr) + r = repr(p) + # The repr() roundtrips. + q = eval(r, pathlib.__dict__) + self.assertIs(q.__class__, p.__class__) + self.assertEqual(q, p) + self.assertEqual(repr(q), r) + + +class PurePosixPathTest(PurePathTest): + cls = pathlib.PurePosixPath + + def test_parse_path(self): + check = self._check_parse_path + # Collapsing of excess leading slashes, except for the double-slash + # special case. + check('//a/b', '', '//', ['a', 'b']) + check('///a/b', '', '/', ['a', 'b']) + check('////a/b', '', '/', ['a', 'b']) + # Paths which look like NT paths aren't treated specially. 
+ check('c:a', '', '', ['c:a',]) + check('c:\\a', '', '', ['c:\\a',]) + check('\\a', '', '', ['\\a',]) + + def test_root(self): + P = self.cls + self.assertEqual(P('/a/b').root, '/') + self.assertEqual(P('///a/b').root, '/') + # POSIX special case for two leading slashes. + self.assertEqual(P('//a/b').root, '//') + + def test_eq(self): + P = self.cls + self.assertNotEqual(P('a/b'), P('A/b')) + self.assertEqual(P('/a'), P('///a')) + self.assertNotEqual(P('/a'), P('//a')) + + def test_as_uri(self): + P = self.cls + self.assertEqual(P('/').as_uri(), 'file:///') + self.assertEqual(P('/a/b.c').as_uri(), 'file:///a/b.c') + self.assertEqual(P('/a/b%#c').as_uri(), 'file:///a/b%25%23c') + + def test_as_uri_non_ascii(self): + from urllib.parse import quote_from_bytes + P = self.cls + try: + os.fsencode('\xe9') + except UnicodeEncodeError: + self.skipTest("\\xe9 cannot be encoded to the filesystem encoding") + self.assertEqual(P('/a/b\xe9').as_uri(), + 'file:///a/b' + quote_from_bytes(os.fsencode('\xe9'))) + + def test_match(self): + P = self.cls + self.assertFalse(P('A.py').match('a.PY')) + + def test_is_absolute(self): + P = self.cls + self.assertFalse(P().is_absolute()) + self.assertFalse(P('a').is_absolute()) + self.assertFalse(P('a/b/').is_absolute()) + self.assertTrue(P('/').is_absolute()) + self.assertTrue(P('/a').is_absolute()) + self.assertTrue(P('/a/b/').is_absolute()) + self.assertTrue(P('//a').is_absolute()) + self.assertTrue(P('//a/b').is_absolute()) + + def test_is_reserved(self): + P = self.cls + self.assertIs(False, P('').is_reserved()) + self.assertIs(False, P('/').is_reserved()) + self.assertIs(False, P('/foo/bar').is_reserved()) + self.assertIs(False, P('/dev/con/PRN/NUL').is_reserved()) + + def test_join(self): + P = self.cls + p = P('//a') + pp = p.joinpath('b') + self.assertEqual(pp, P('//a/b')) + pp = P('/a').joinpath('//c') + self.assertEqual(pp, P('//c')) + pp = P('//a').joinpath('/c') + self.assertEqual(pp, P('/c')) + + def test_div(self): + # Basically the same as joinpath(). + P = self.cls + p = P('//a') + pp = p / 'b' + self.assertEqual(pp, P('//a/b')) + pp = P('/a') / '//c' + self.assertEqual(pp, P('//c')) + pp = P('//a') / '/c' + self.assertEqual(pp, P('/c')) + + def test_parse_windows_path(self): + P = self.cls + p = P('c:', 'a', 'b') + pp = P(pathlib.PureWindowsPath('c:\\a\\b')) + self.assertEqual(p, pp) + + +class PureWindowsPathTest(PurePathTest): + cls = pathlib.PureWindowsPath + + equivalences = PurePathTest.equivalences.copy() + equivalences.update({ + './a:b': [ ('./a:b',) ], + 'c:a': [ ('c:', 'a'), ('c:', 'a/'), ('.', 'c:', 'a') ], + 'c:/a': [ + ('c:/', 'a'), ('c:', '/', 'a'), ('c:', '/a'), + ('/z', 'c:/', 'a'), ('//x/y', 'c:/', 'a'), + ], + '//a/b/': [ ('//a/b',) ], + '//a/b/c': [ + ('//a/b', 'c'), ('//a/b/', 'c'), + ], + }) + + def test_parse_path(self): + check = self._check_parse_path + # First part is anchored. + check('c:', 'c:', '', []) + check('c:/', 'c:', '\\', []) + check('/', '', '\\', []) + check('c:a', 'c:', '', ['a']) + check('c:/a', 'c:', '\\', ['a']) + check('/a', '', '\\', ['a']) + # UNC paths. + check('//', '\\\\', '', []) + check('//a', '\\\\a', '', []) + check('//a/', '\\\\a\\', '', []) + check('//a/b', '\\\\a\\b', '\\', []) + check('//a/b/', '\\\\a\\b', '\\', []) + check('//a/b/c', '\\\\a\\b', '\\', ['c']) + # Collapsing and stripping excess slashes. + check('Z://b//c/d/', 'Z:', '\\', ['b', 'c', 'd']) + # UNC paths. + check('//b/c//d', '\\\\b\\c', '\\', ['d']) + # Extended paths. 
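As a quick illustration of the POSIX root handling exercised by test_root() and test_join() above (a sketch using PurePosixPath):

    from pathlib import PurePosixPath as P

    # Exactly two leading slashes are a distinct root; three or more collapse to '/'.
    P('//a/b').root       # '//'
    P('///a/b').root      # '/'
    P('/a') == P('///a')  # True
    P('/a') == P('//a')   # False

    # Joining an absolute segment replaces the path and keeps that segment's root.
    P('/a').joinpath('//c')  # PurePosixPath('//c')
    P('//a').joinpath('/c')  # PurePosixPath('/c')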
+ check('//./c:', '\\\\.\\c:', '', []) + check('//?/c:/', '\\\\?\\c:', '\\', []) + check('//?/c:/a', '\\\\?\\c:', '\\', ['a']) + # Extended UNC paths (format is "\\?\UNC\server\share"). + check('//?', '\\\\?', '', []) + check('//?/', '\\\\?\\', '', []) + check('//?/UNC', '\\\\?\\UNC', '', []) + check('//?/UNC/', '\\\\?\\UNC\\', '', []) + check('//?/UNC/b', '\\\\?\\UNC\\b', '', []) + check('//?/UNC/b/', '\\\\?\\UNC\\b\\', '', []) + check('//?/UNC/b/c', '\\\\?\\UNC\\b\\c', '\\', []) + check('//?/UNC/b/c/', '\\\\?\\UNC\\b\\c', '\\', []) + check('//?/UNC/b/c/d', '\\\\?\\UNC\\b\\c', '\\', ['d']) + # UNC device paths + check('//./BootPartition/', '\\\\.\\BootPartition', '\\', []) + check('//?/BootPartition/', '\\\\?\\BootPartition', '\\', []) + check('//./PhysicalDrive0', '\\\\.\\PhysicalDrive0', '', []) + check('//?/Volume{}/', '\\\\?\\Volume{}', '\\', []) + check('//./nul', '\\\\.\\nul', '', []) + # Paths to files with NTFS alternate data streams + check('./c:s', '', '', ['c:s']) + check('cc:s', '', '', ['cc:s']) + check('C:c:s', 'C:', '', ['c:s']) + check('C:/c:s', 'C:', '\\', ['c:s']) + check('D:a/c:b', 'D:', '', ['a', 'c:b']) + check('D:/a/c:b', 'D:', '\\', ['a', 'c:b']) + + def test_str(self): + p = self.cls('a/b/c') + self.assertEqual(str(p), 'a\\b\\c') + p = self.cls('c:/a/b/c') + self.assertEqual(str(p), 'c:\\a\\b\\c') + p = self.cls('//a/b') + self.assertEqual(str(p), '\\\\a\\b\\') + p = self.cls('//a/b/c') + self.assertEqual(str(p), '\\\\a\\b\\c') + p = self.cls('//a/b/c/d') + self.assertEqual(str(p), '\\\\a\\b\\c\\d') + + def test_str_subclass(self): + self._check_str_subclass('.\\a:b') + self._check_str_subclass('c:') + self._check_str_subclass('c:a') + self._check_str_subclass('c:a\\b.txt') + self._check_str_subclass('c:\\') + self._check_str_subclass('c:\\a') + self._check_str_subclass('c:\\a\\b.txt') + self._check_str_subclass('\\\\some\\share') + self._check_str_subclass('\\\\some\\share\\a') + self._check_str_subclass('\\\\some\\share\\a\\b.txt') + + def test_eq(self): + P = self.cls + self.assertEqual(P('c:a/b'), P('c:a/b')) + self.assertEqual(P('c:a/b'), P('c:', 'a', 'b')) + self.assertNotEqual(P('c:a/b'), P('d:a/b')) + self.assertNotEqual(P('c:a/b'), P('c:/a/b')) + self.assertNotEqual(P('/a/b'), P('c:/a/b')) + # Case-insensitivity. + self.assertEqual(P('a/B'), P('A/b')) + self.assertEqual(P('C:a/B'), P('c:A/b')) + self.assertEqual(P('//Some/SHARE/a/B'), P('//somE/share/A/b')) + self.assertEqual(P('\u0130'), P('i\u0307')) + + def test_as_uri(self): + P = self.cls + with self.assertRaises(ValueError): + P('/a/b').as_uri() + with self.assertRaises(ValueError): + P('c:a/b').as_uri() + self.assertEqual(P('c:/').as_uri(), 'file:///c:/') + self.assertEqual(P('c:/a/b.c').as_uri(), 'file:///c:/a/b.c') + self.assertEqual(P('c:/a/b%#c').as_uri(), 'file:///c:/a/b%25%23c') + self.assertEqual(P('c:/a/b\xe9').as_uri(), 'file:///c:/a/b%C3%A9') + self.assertEqual(P('//some/share/').as_uri(), 'file://some/share/') + self.assertEqual(P('//some/share/a/b.c').as_uri(), + 'file://some/share/a/b.c') + self.assertEqual(P('//some/share/a/b%#c\xe9').as_uri(), + 'file://some/share/a/b%25%23c%C3%A9') + + def test_match(self): + P = self.cls + # Absolute patterns. 
+ self.assertTrue(P('c:/b.py').match('*:/*.py')) + self.assertTrue(P('c:/b.py').match('c:/*.py')) + self.assertFalse(P('d:/b.py').match('c:/*.py')) # wrong drive + self.assertFalse(P('b.py').match('/*.py')) + self.assertFalse(P('b.py').match('c:*.py')) + self.assertFalse(P('b.py').match('c:/*.py')) + self.assertFalse(P('c:b.py').match('/*.py')) + self.assertFalse(P('c:b.py').match('c:/*.py')) + self.assertFalse(P('/b.py').match('c:*.py')) + self.assertFalse(P('/b.py').match('c:/*.py')) + # UNC patterns. + self.assertTrue(P('//some/share/a.py').match('//*/*/*.py')) + self.assertTrue(P('//some/share/a.py').match('//some/share/*.py')) + self.assertFalse(P('//other/share/a.py').match('//some/share/*.py')) + self.assertFalse(P('//some/share/a/b.py').match('//some/share/*.py')) + # Case-insensitivity. + self.assertTrue(P('B.py').match('b.PY')) + self.assertTrue(P('c:/a/B.Py').match('C:/A/*.pY')) + self.assertTrue(P('//Some/Share/B.Py').match('//somE/sharE/*.pY')) + # Path anchor doesn't match pattern anchor + self.assertFalse(P('c:/b.py').match('/*.py')) # 'c:/' vs '/' + self.assertFalse(P('c:/b.py').match('c:*.py')) # 'c:/' vs 'c:' + self.assertFalse(P('//some/share/a.py').match('/*.py')) # '//some/share/' vs '/' + + def test_ordering_common(self): + # Case-insensitivity. + def assertOrderedEqual(a, b): + self.assertLessEqual(a, b) + self.assertGreaterEqual(b, a) + P = self.cls + p = P('c:A/b') + q = P('C:a/B') + assertOrderedEqual(p, q) + self.assertFalse(p < q) + self.assertFalse(p > q) + p = P('//some/Share/A/b') + q = P('//Some/SHARE/a/B') + assertOrderedEqual(p, q) + self.assertFalse(p < q) + self.assertFalse(p > q) + + def test_parts(self): + P = self.cls + p = P('c:a/b') + parts = p.parts + self.assertEqual(parts, ('c:', 'a', 'b')) + p = P('c:/a/b') + parts = p.parts + self.assertEqual(parts, ('c:\\', 'a', 'b')) + p = P('//a/b/c/d') + parts = p.parts + self.assertEqual(parts, ('\\\\a\\b\\', 'c', 'd')) + + def test_parent(self): + # Anchored + P = self.cls + p = P('z:a/b/c') + self.assertEqual(p.parent, P('z:a/b')) + self.assertEqual(p.parent.parent, P('z:a')) + self.assertEqual(p.parent.parent.parent, P('z:')) + self.assertEqual(p.parent.parent.parent.parent, P('z:')) + p = P('z:/a/b/c') + self.assertEqual(p.parent, P('z:/a/b')) + self.assertEqual(p.parent.parent, P('z:/a')) + self.assertEqual(p.parent.parent.parent, P('z:/')) + self.assertEqual(p.parent.parent.parent.parent, P('z:/')) + p = P('//a/b/c/d') + self.assertEqual(p.parent, P('//a/b/c')) + self.assertEqual(p.parent.parent, P('//a/b')) + self.assertEqual(p.parent.parent.parent, P('//a/b')) + + def test_parents(self): + # Anchored + P = self.cls + p = P('z:a/b/') + par = p.parents + self.assertEqual(len(par), 2) + self.assertEqual(par[0], P('z:a')) + self.assertEqual(par[1], P('z:')) + self.assertEqual(par[0:1], (P('z:a'),)) + self.assertEqual(par[:-1], (P('z:a'),)) + self.assertEqual(par[:2], (P('z:a'), P('z:'))) + self.assertEqual(par[1:], (P('z:'),)) + self.assertEqual(par[::2], (P('z:a'),)) + self.assertEqual(par[::-1], (P('z:'), P('z:a'))) + self.assertEqual(list(par), [P('z:a'), P('z:')]) + with self.assertRaises(IndexError): + par[2] + p = P('z:/a/b/') + par = p.parents + self.assertEqual(len(par), 2) + self.assertEqual(par[0], P('z:/a')) + self.assertEqual(par[1], P('z:/')) + self.assertEqual(par[0:1], (P('z:/a'),)) + self.assertEqual(par[0:-1], (P('z:/a'),)) + self.assertEqual(par[:2], (P('z:/a'), P('z:/'))) + self.assertEqual(par[1:], (P('z:/'),)) + self.assertEqual(par[::2], (P('z:/a'),)) + self.assertEqual(par[::-1], 
(P('z:/'), P('z:/a'),)) + self.assertEqual(list(par), [P('z:/a'), P('z:/')]) + with self.assertRaises(IndexError): + par[2] + p = P('//a/b/c/d') + par = p.parents + self.assertEqual(len(par), 2) + self.assertEqual(par[0], P('//a/b/c')) + self.assertEqual(par[1], P('//a/b')) + self.assertEqual(par[0:1], (P('//a/b/c'),)) + self.assertEqual(par[0:-1], (P('//a/b/c'),)) + self.assertEqual(par[:2], (P('//a/b/c'), P('//a/b'))) + self.assertEqual(par[1:], (P('//a/b'),)) + self.assertEqual(par[::2], (P('//a/b/c'),)) + self.assertEqual(par[::-1], (P('//a/b'), P('//a/b/c'))) + self.assertEqual(list(par), [P('//a/b/c'), P('//a/b')]) + with self.assertRaises(IndexError): + par[2] + + def test_drive(self): + P = self.cls + self.assertEqual(P('c:').drive, 'c:') + self.assertEqual(P('c:a/b').drive, 'c:') + self.assertEqual(P('c:/').drive, 'c:') + self.assertEqual(P('c:/a/b/').drive, 'c:') + self.assertEqual(P('//a/b').drive, '\\\\a\\b') + self.assertEqual(P('//a/b/').drive, '\\\\a\\b') + self.assertEqual(P('//a/b/c/d').drive, '\\\\a\\b') + self.assertEqual(P('./c:a').drive, '') + + def test_root(self): + P = self.cls + self.assertEqual(P('c:').root, '') + self.assertEqual(P('c:a/b').root, '') + self.assertEqual(P('c:/').root, '\\') + self.assertEqual(P('c:/a/b/').root, '\\') + self.assertEqual(P('//a/b').root, '\\') + self.assertEqual(P('//a/b/').root, '\\') + self.assertEqual(P('//a/b/c/d').root, '\\') + + def test_anchor(self): + P = self.cls + self.assertEqual(P('c:').anchor, 'c:') + self.assertEqual(P('c:a/b').anchor, 'c:') + self.assertEqual(P('c:/').anchor, 'c:\\') + self.assertEqual(P('c:/a/b/').anchor, 'c:\\') + self.assertEqual(P('//a/b').anchor, '\\\\a\\b\\') + self.assertEqual(P('//a/b/').anchor, '\\\\a\\b\\') + self.assertEqual(P('//a/b/c/d').anchor, '\\\\a\\b\\') + + def test_name(self): + P = self.cls + self.assertEqual(P('c:').name, '') + self.assertEqual(P('c:/').name, '') + self.assertEqual(P('c:a/b').name, 'b') + self.assertEqual(P('c:/a/b').name, 'b') + self.assertEqual(P('c:a/b.py').name, 'b.py') + self.assertEqual(P('c:/a/b.py').name, 'b.py') + self.assertEqual(P('//My.py/Share.php').name, '') + self.assertEqual(P('//My.py/Share.php/a/b').name, 'b') + + def test_suffix(self): + P = self.cls + self.assertEqual(P('c:').suffix, '') + self.assertEqual(P('c:/').suffix, '') + self.assertEqual(P('c:a/b').suffix, '') + self.assertEqual(P('c:/a/b').suffix, '') + self.assertEqual(P('c:a/b.py').suffix, '.py') + self.assertEqual(P('c:/a/b.py').suffix, '.py') + self.assertEqual(P('c:a/.hgrc').suffix, '') + self.assertEqual(P('c:/a/.hgrc').suffix, '') + self.assertEqual(P('c:a/.hg.rc').suffix, '.rc') + self.assertEqual(P('c:/a/.hg.rc').suffix, '.rc') + self.assertEqual(P('c:a/b.tar.gz').suffix, '.gz') + self.assertEqual(P('c:/a/b.tar.gz').suffix, '.gz') + self.assertEqual(P('c:a/Some name. Ending with a dot.').suffix, '') + self.assertEqual(P('c:/a/Some name. 
Ending with a dot.').suffix, '') + self.assertEqual(P('//My.py/Share.php').suffix, '') + self.assertEqual(P('//My.py/Share.php/a/b').suffix, '') + + def test_suffixes(self): + P = self.cls + self.assertEqual(P('c:').suffixes, []) + self.assertEqual(P('c:/').suffixes, []) + self.assertEqual(P('c:a/b').suffixes, []) + self.assertEqual(P('c:/a/b').suffixes, []) + self.assertEqual(P('c:a/b.py').suffixes, ['.py']) + self.assertEqual(P('c:/a/b.py').suffixes, ['.py']) + self.assertEqual(P('c:a/.hgrc').suffixes, []) + self.assertEqual(P('c:/a/.hgrc').suffixes, []) + self.assertEqual(P('c:a/.hg.rc').suffixes, ['.rc']) + self.assertEqual(P('c:/a/.hg.rc').suffixes, ['.rc']) + self.assertEqual(P('c:a/b.tar.gz').suffixes, ['.tar', '.gz']) + self.assertEqual(P('c:/a/b.tar.gz').suffixes, ['.tar', '.gz']) + self.assertEqual(P('//My.py/Share.php').suffixes, []) + self.assertEqual(P('//My.py/Share.php/a/b').suffixes, []) + self.assertEqual(P('c:a/Some name. Ending with a dot.').suffixes, []) + self.assertEqual(P('c:/a/Some name. Ending with a dot.').suffixes, []) + + def test_stem(self): + P = self.cls + self.assertEqual(P('c:').stem, '') + self.assertEqual(P('c:.').stem, '') + self.assertEqual(P('c:..').stem, '..') + self.assertEqual(P('c:/').stem, '') + self.assertEqual(P('c:a/b').stem, 'b') + self.assertEqual(P('c:a/b.py').stem, 'b') + self.assertEqual(P('c:a/.hgrc').stem, '.hgrc') + self.assertEqual(P('c:a/.hg.rc').stem, '.hg') + self.assertEqual(P('c:a/b.tar.gz').stem, 'b.tar') + self.assertEqual(P('c:a/Some name. Ending with a dot.').stem, + 'Some name. Ending with a dot.') + + def test_with_name(self): + P = self.cls + self.assertEqual(P('c:a/b').with_name('d.xml'), P('c:a/d.xml')) + self.assertEqual(P('c:/a/b').with_name('d.xml'), P('c:/a/d.xml')) + self.assertEqual(P('c:a/Dot ending.').with_name('d.xml'), P('c:a/d.xml')) + self.assertEqual(P('c:/a/Dot ending.').with_name('d.xml'), P('c:/a/d.xml')) + self.assertRaises(ValueError, P('c:').with_name, 'd.xml') + self.assertRaises(ValueError, P('c:/').with_name, 'd.xml') + self.assertRaises(ValueError, P('//My/Share').with_name, 'd.xml') + self.assertEqual(str(P('a').with_name('d:')), '.\\d:') + self.assertEqual(str(P('a').with_name('d:e')), '.\\d:e') + self.assertEqual(P('c:a/b').with_name('d:'), P('c:a/d:')) + self.assertEqual(P('c:a/b').with_name('d:e'), P('c:a/d:e')) + self.assertRaises(ValueError, P('c:a/b').with_name, 'd:/e') + self.assertRaises(ValueError, P('c:a/b').with_name, '//My/Share') + + def test_with_stem(self): + P = self.cls + self.assertEqual(P('c:a/b').with_stem('d'), P('c:a/d')) + self.assertEqual(P('c:/a/b').with_stem('d'), P('c:/a/d')) + self.assertEqual(P('c:a/Dot ending.').with_stem('d'), P('c:a/d')) + self.assertEqual(P('c:/a/Dot ending.').with_stem('d'), P('c:/a/d')) + self.assertRaises(ValueError, P('c:').with_stem, 'd') + self.assertRaises(ValueError, P('c:/').with_stem, 'd') + self.assertRaises(ValueError, P('//My/Share').with_stem, 'd') + self.assertEqual(str(P('a').with_stem('d:')), '.\\d:') + self.assertEqual(str(P('a').with_stem('d:e')), '.\\d:e') + self.assertEqual(P('c:a/b').with_stem('d:'), P('c:a/d:')) + self.assertEqual(P('c:a/b').with_stem('d:e'), P('c:a/d:e')) + self.assertRaises(ValueError, P('c:a/b').with_stem, 'd:/e') + self.assertRaises(ValueError, P('c:a/b').with_stem, '//My/Share') + + def test_with_suffix(self): + P = self.cls + self.assertEqual(P('c:a/b').with_suffix('.gz'), P('c:a/b.gz')) + self.assertEqual(P('c:/a/b').with_suffix('.gz'), P('c:/a/b.gz')) + 
self.assertEqual(P('c:a/b.py').with_suffix('.gz'), P('c:a/b.gz')) + self.assertEqual(P('c:/a/b.py').with_suffix('.gz'), P('c:/a/b.gz')) + # Path doesn't have a "filename" component. + self.assertRaises(ValueError, P('').with_suffix, '.gz') + self.assertRaises(ValueError, P('.').with_suffix, '.gz') + self.assertRaises(ValueError, P('/').with_suffix, '.gz') + self.assertRaises(ValueError, P('//My/Share').with_suffix, '.gz') + # Invalid suffix. + self.assertRaises(ValueError, P('c:a/b').with_suffix, 'gz') + self.assertRaises(ValueError, P('c:a/b').with_suffix, '/') + self.assertRaises(ValueError, P('c:a/b').with_suffix, '\\') + self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c:') + self.assertRaises(ValueError, P('c:a/b').with_suffix, '/.gz') + self.assertRaises(ValueError, P('c:a/b').with_suffix, '\\.gz') + self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c:.gz') + self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c/d') + self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c\\d') + self.assertRaises(ValueError, P('c:a/b').with_suffix, '.c/d') + self.assertRaises(ValueError, P('c:a/b').with_suffix, '.c\\d') + + def test_relative_to(self): + P = self.cls + p = P('C:Foo/Bar') + self.assertEqual(p.relative_to(P('c:')), P('Foo/Bar')) + self.assertEqual(p.relative_to('c:'), P('Foo/Bar')) + self.assertEqual(p.relative_to(P('c:foO')), P('Bar')) + self.assertEqual(p.relative_to('c:foO'), P('Bar')) + self.assertEqual(p.relative_to('c:foO/'), P('Bar')) + self.assertEqual(p.relative_to(P('c:foO/baR')), P()) + self.assertEqual(p.relative_to('c:foO/baR'), P()) + self.assertEqual(p.relative_to(P('c:'), walk_up=True), P('Foo/Bar')) + self.assertEqual(p.relative_to('c:', walk_up=True), P('Foo/Bar')) + self.assertEqual(p.relative_to(P('c:foO'), walk_up=True), P('Bar')) + self.assertEqual(p.relative_to('c:foO', walk_up=True), P('Bar')) + self.assertEqual(p.relative_to('c:foO/', walk_up=True), P('Bar')) + self.assertEqual(p.relative_to(P('c:foO/baR'), walk_up=True), P()) + self.assertEqual(p.relative_to('c:foO/baR', walk_up=True), P()) + self.assertEqual(p.relative_to(P('C:Foo/Bar/Baz'), walk_up=True), P('..')) + self.assertEqual(p.relative_to(P('C:Foo/Baz'), walk_up=True), P('../Bar')) + self.assertEqual(p.relative_to(P('C:Baz/Bar'), walk_up=True), P('../../Foo/Bar')) + # Unrelated paths. 
+ self.assertRaises(ValueError, p.relative_to, P()) + self.assertRaises(ValueError, p.relative_to, '') + self.assertRaises(ValueError, p.relative_to, P('d:')) + self.assertRaises(ValueError, p.relative_to, P('/')) + self.assertRaises(ValueError, p.relative_to, P('Foo')) + self.assertRaises(ValueError, p.relative_to, P('/Foo')) + self.assertRaises(ValueError, p.relative_to, P('C:/Foo')) + self.assertRaises(ValueError, p.relative_to, P('C:Foo/Bar/Baz')) + self.assertRaises(ValueError, p.relative_to, P('C:Foo/Baz')) + self.assertRaises(ValueError, p.relative_to, P(), walk_up=True) + self.assertRaises(ValueError, p.relative_to, '', walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('d:'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('/'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('Foo'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('/Foo'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('C:/Foo'), walk_up=True) + p = P('C:/Foo/Bar') + self.assertEqual(p.relative_to(P('c:/')), P('Foo/Bar')) + self.assertEqual(p.relative_to('c:/'), P('Foo/Bar')) + self.assertEqual(p.relative_to(P('c:/foO')), P('Bar')) + self.assertEqual(p.relative_to('c:/foO'), P('Bar')) + self.assertEqual(p.relative_to('c:/foO/'), P('Bar')) + self.assertEqual(p.relative_to(P('c:/foO/baR')), P()) + self.assertEqual(p.relative_to('c:/foO/baR'), P()) + self.assertEqual(p.relative_to(P('c:/'), walk_up=True), P('Foo/Bar')) + self.assertEqual(p.relative_to('c:/', walk_up=True), P('Foo/Bar')) + self.assertEqual(p.relative_to(P('c:/foO'), walk_up=True), P('Bar')) + self.assertEqual(p.relative_to('c:/foO', walk_up=True), P('Bar')) + self.assertEqual(p.relative_to('c:/foO/', walk_up=True), P('Bar')) + self.assertEqual(p.relative_to(P('c:/foO/baR'), walk_up=True), P()) + self.assertEqual(p.relative_to('c:/foO/baR', walk_up=True), P()) + self.assertEqual(p.relative_to('C:/Baz', walk_up=True), P('../Foo/Bar')) + self.assertEqual(p.relative_to('C:/Foo/Bar/Baz', walk_up=True), P('..')) + self.assertEqual(p.relative_to('C:/Foo/Baz', walk_up=True), P('../Bar')) + # Unrelated paths. + self.assertRaises(ValueError, p.relative_to, 'c:') + self.assertRaises(ValueError, p.relative_to, P('c:')) + self.assertRaises(ValueError, p.relative_to, P('C:/Baz')) + self.assertRaises(ValueError, p.relative_to, P('C:/Foo/Bar/Baz')) + self.assertRaises(ValueError, p.relative_to, P('C:/Foo/Baz')) + self.assertRaises(ValueError, p.relative_to, P('C:Foo')) + self.assertRaises(ValueError, p.relative_to, P('d:')) + self.assertRaises(ValueError, p.relative_to, P('d:/')) + self.assertRaises(ValueError, p.relative_to, P('/')) + self.assertRaises(ValueError, p.relative_to, P('/Foo')) + self.assertRaises(ValueError, p.relative_to, P('//C/Foo')) + self.assertRaises(ValueError, p.relative_to, 'c:', walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('c:'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('C:Foo'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('d:'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('d:/'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('/'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('/Foo'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('//C/Foo'), walk_up=True) + # UNC paths. 
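The walk_up cases above can be summarised briefly; a sketch assuming Python 3.12+, where relative_to() gained the walk_up parameter:

    from pathlib import PureWindowsPath as P

    p = P('C:/Foo/Bar')
    p.relative_to('C:/Foo/Baz', walk_up=True)      # PureWindowsPath('../Bar')
    p.relative_to('C:/Baz', walk_up=True)          # PureWindowsPath('../Foo/Bar')
    p.relative_to('C:/Foo/Bar/Baz', walk_up=True)  # PureWindowsPath('..')

    # walk_up never crosses the anchor: a different drive still raises.
    try:
        p.relative_to('d:/', walk_up=True)
    except ValueError:
        pass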
+ p = P('//Server/Share/Foo/Bar') + self.assertEqual(p.relative_to(P('//sErver/sHare')), P('Foo/Bar')) + self.assertEqual(p.relative_to('//sErver/sHare'), P('Foo/Bar')) + self.assertEqual(p.relative_to('//sErver/sHare/'), P('Foo/Bar')) + self.assertEqual(p.relative_to(P('//sErver/sHare/Foo')), P('Bar')) + self.assertEqual(p.relative_to('//sErver/sHare/Foo'), P('Bar')) + self.assertEqual(p.relative_to('//sErver/sHare/Foo/'), P('Bar')) + self.assertEqual(p.relative_to(P('//sErver/sHare/Foo/Bar')), P()) + self.assertEqual(p.relative_to('//sErver/sHare/Foo/Bar'), P()) + self.assertEqual(p.relative_to(P('//sErver/sHare'), walk_up=True), P('Foo/Bar')) + self.assertEqual(p.relative_to('//sErver/sHare', walk_up=True), P('Foo/Bar')) + self.assertEqual(p.relative_to('//sErver/sHare/', walk_up=True), P('Foo/Bar')) + self.assertEqual(p.relative_to(P('//sErver/sHare/Foo'), walk_up=True), P('Bar')) + self.assertEqual(p.relative_to('//sErver/sHare/Foo', walk_up=True), P('Bar')) + self.assertEqual(p.relative_to('//sErver/sHare/Foo/', walk_up=True), P('Bar')) + self.assertEqual(p.relative_to(P('//sErver/sHare/Foo/Bar'), walk_up=True), P()) + self.assertEqual(p.relative_to('//sErver/sHare/Foo/Bar', walk_up=True), P()) + self.assertEqual(p.relative_to(P('//sErver/sHare/bar'), walk_up=True), P('../Foo/Bar')) + self.assertEqual(p.relative_to('//sErver/sHare/bar', walk_up=True), P('../Foo/Bar')) + # Unrelated paths. + self.assertRaises(ValueError, p.relative_to, P('/Server/Share/Foo')) + self.assertRaises(ValueError, p.relative_to, P('c:/Server/Share/Foo')) + self.assertRaises(ValueError, p.relative_to, P('//z/Share/Foo')) + self.assertRaises(ValueError, p.relative_to, P('//Server/z/Foo')) + self.assertRaises(ValueError, p.relative_to, P('/Server/Share/Foo'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('c:/Server/Share/Foo'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('//z/Share/Foo'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('//Server/z/Foo'), walk_up=True) + + def test_is_relative_to(self): + P = self.cls + p = P('C:Foo/Bar') + self.assertTrue(p.is_relative_to(P('c:'))) + self.assertTrue(p.is_relative_to('c:')) + self.assertTrue(p.is_relative_to(P('c:foO'))) + self.assertTrue(p.is_relative_to('c:foO')) + self.assertTrue(p.is_relative_to('c:foO/')) + self.assertTrue(p.is_relative_to(P('c:foO/baR'))) + self.assertTrue(p.is_relative_to('c:foO/baR')) + # Unrelated paths. + self.assertFalse(p.is_relative_to(P())) + self.assertFalse(p.is_relative_to('')) + self.assertFalse(p.is_relative_to(P('d:'))) + self.assertFalse(p.is_relative_to(P('/'))) + self.assertFalse(p.is_relative_to(P('Foo'))) + self.assertFalse(p.is_relative_to(P('/Foo'))) + self.assertFalse(p.is_relative_to(P('C:/Foo'))) + self.assertFalse(p.is_relative_to(P('C:Foo/Bar/Baz'))) + self.assertFalse(p.is_relative_to(P('C:Foo/Baz'))) + p = P('C:/Foo/Bar') + self.assertTrue(p.is_relative_to(P('c:/'))) + self.assertTrue(p.is_relative_to(P('c:/foO'))) + self.assertTrue(p.is_relative_to('c:/foO/')) + self.assertTrue(p.is_relative_to(P('c:/foO/baR'))) + self.assertTrue(p.is_relative_to('c:/foO/baR')) + # Unrelated paths. 
+ self.assertFalse(p.is_relative_to('c:')) + self.assertFalse(p.is_relative_to(P('C:/Baz'))) + self.assertFalse(p.is_relative_to(P('C:/Foo/Bar/Baz'))) + self.assertFalse(p.is_relative_to(P('C:/Foo/Baz'))) + self.assertFalse(p.is_relative_to(P('C:Foo'))) + self.assertFalse(p.is_relative_to(P('d:'))) + self.assertFalse(p.is_relative_to(P('d:/'))) + self.assertFalse(p.is_relative_to(P('/'))) + self.assertFalse(p.is_relative_to(P('/Foo'))) + self.assertFalse(p.is_relative_to(P('//C/Foo'))) + # UNC paths. + p = P('//Server/Share/Foo/Bar') + self.assertTrue(p.is_relative_to(P('//sErver/sHare'))) + self.assertTrue(p.is_relative_to('//sErver/sHare')) + self.assertTrue(p.is_relative_to('//sErver/sHare/')) + self.assertTrue(p.is_relative_to(P('//sErver/sHare/Foo'))) + self.assertTrue(p.is_relative_to('//sErver/sHare/Foo')) + self.assertTrue(p.is_relative_to('//sErver/sHare/Foo/')) + self.assertTrue(p.is_relative_to(P('//sErver/sHare/Foo/Bar'))) + self.assertTrue(p.is_relative_to('//sErver/sHare/Foo/Bar')) + # Unrelated paths. + self.assertFalse(p.is_relative_to(P('/Server/Share/Foo'))) + self.assertFalse(p.is_relative_to(P('c:/Server/Share/Foo'))) + self.assertFalse(p.is_relative_to(P('//z/Share/Foo'))) + self.assertFalse(p.is_relative_to(P('//Server/z/Foo'))) + + def test_is_absolute(self): + P = self.cls + # Under NT, only paths with both a drive and a root are absolute. + self.assertFalse(P().is_absolute()) + self.assertFalse(P('a').is_absolute()) + self.assertFalse(P('a/b/').is_absolute()) + self.assertFalse(P('/').is_absolute()) + self.assertFalse(P('/a').is_absolute()) + self.assertFalse(P('/a/b/').is_absolute()) + self.assertFalse(P('c:').is_absolute()) + self.assertFalse(P('c:a').is_absolute()) + self.assertFalse(P('c:a/b/').is_absolute()) + self.assertTrue(P('c:/').is_absolute()) + self.assertTrue(P('c:/a').is_absolute()) + self.assertTrue(P('c:/a/b/').is_absolute()) + # UNC paths are absolute by definition. + self.assertTrue(P('//a/b').is_absolute()) + self.assertTrue(P('//a/b/').is_absolute()) + self.assertTrue(P('//a/b/c').is_absolute()) + self.assertTrue(P('//a/b/c/d').is_absolute()) + + def test_join(self): + P = self.cls + p = P('C:/a/b') + pp = p.joinpath('x/y') + self.assertEqual(pp, P('C:/a/b/x/y')) + pp = p.joinpath('/x/y') + self.assertEqual(pp, P('C:/x/y')) + # Joining with a different drive => the first path is ignored, even + # if the second path is relative. + pp = p.joinpath('D:x/y') + self.assertEqual(pp, P('D:x/y')) + pp = p.joinpath('D:/x/y') + self.assertEqual(pp, P('D:/x/y')) + pp = p.joinpath('//host/share/x/y') + self.assertEqual(pp, P('//host/share/x/y')) + # Joining with the same drive => the first path is appended to if + # the second path is relative. + pp = p.joinpath('c:x/y') + self.assertEqual(pp, P('C:/a/b/x/y')) + pp = p.joinpath('c:/x/y') + self.assertEqual(pp, P('C:/x/y')) + # Joining with files with NTFS data streams => the filename should + # not be parsed as a drive letter + pp = p.joinpath(P('./d:s')) + self.assertEqual(pp, P('C:/a/b/d:s')) + pp = p.joinpath(P('./dd:s')) + self.assertEqual(pp, P('C:/a/b/dd:s')) + pp = p.joinpath(P('E:d:s')) + self.assertEqual(pp, P('E:d:s')) + # Joining onto a UNC path with no root + pp = P('//').joinpath('server') + self.assertEqual(pp, P('//server')) + pp = P('//server').joinpath('share') + self.assertEqual(pp, P('//server/share')) + pp = P('//./BootPartition').joinpath('Windows') + self.assertEqual(pp, P('//./BootPartition/Windows')) + + def test_div(self): + # Basically the same as joinpath(). 
+ P = self.cls + p = P('C:/a/b') + self.assertEqual(p / 'x/y', P('C:/a/b/x/y')) + self.assertEqual(p / 'x' / 'y', P('C:/a/b/x/y')) + self.assertEqual(p / '/x/y', P('C:/x/y')) + self.assertEqual(p / '/x' / 'y', P('C:/x/y')) + # Joining with a different drive => the first path is ignored, even + # if the second path is relative. + self.assertEqual(p / 'D:x/y', P('D:x/y')) + self.assertEqual(p / 'D:' / 'x/y', P('D:x/y')) + self.assertEqual(p / 'D:/x/y', P('D:/x/y')) + self.assertEqual(p / 'D:' / '/x/y', P('D:/x/y')) + self.assertEqual(p / '//host/share/x/y', P('//host/share/x/y')) + # Joining with the same drive => the first path is appended to if + # the second path is relative. + self.assertEqual(p / 'c:x/y', P('C:/a/b/x/y')) + self.assertEqual(p / 'c:/x/y', P('C:/x/y')) + # Joining with files with NTFS data streams => the filename should + # not be parsed as a drive letter + self.assertEqual(p / P('./d:s'), P('C:/a/b/d:s')) + self.assertEqual(p / P('./dd:s'), P('C:/a/b/dd:s')) + self.assertEqual(p / P('E:d:s'), P('E:d:s')) + + def test_is_reserved(self): + P = self.cls + self.assertIs(False, P('').is_reserved()) + self.assertIs(False, P('/').is_reserved()) + self.assertIs(False, P('/foo/bar').is_reserved()) + # UNC paths are never reserved. + self.assertIs(False, P('//my/share/nul/con/aux').is_reserved()) + # Case-insensitive DOS-device names are reserved. + self.assertIs(True, P('nul').is_reserved()) + self.assertIs(True, P('aux').is_reserved()) + self.assertIs(True, P('prn').is_reserved()) + self.assertIs(True, P('con').is_reserved()) + self.assertIs(True, P('conin$').is_reserved()) + self.assertIs(True, P('conout$').is_reserved()) + # COM/LPT + 1-9 or + superscript 1-3 are reserved. + self.assertIs(True, P('COM1').is_reserved()) + self.assertIs(True, P('LPT9').is_reserved()) + self.assertIs(True, P('com\xb9').is_reserved()) + self.assertIs(True, P('com\xb2').is_reserved()) + self.assertIs(True, P('lpt\xb3').is_reserved()) + # DOS-device name mataching ignores characters after a dot or + # a colon and also ignores trailing spaces. + self.assertIs(True, P('NUL.txt').is_reserved()) + self.assertIs(True, P('PRN ').is_reserved()) + self.assertIs(True, P('AUX .txt').is_reserved()) + self.assertIs(True, P('COM1:bar').is_reserved()) + self.assertIs(True, P('LPT9 :bar').is_reserved()) + # DOS-device names are only matched at the beginning + # of a path component. + self.assertIs(False, P('bar.com9').is_reserved()) + self.assertIs(False, P('bar.lpt9').is_reserved()) + # Only the last path component matters. + self.assertIs(True, P('c:/baz/con/NUL').is_reserved()) + self.assertIs(False, P('c:/NUL/con/baz').is_reserved()) + + +class PurePathSubclassTest(PurePathTest): + class cls(pathlib.PurePath): + pass + + # repr() roundtripping is not supported in custom subclass. + test_repr_roundtrips = None + + +# +# Tests for the concrete classes. 
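To make the drive-handling rules in test_join()/test_div() above easier to scan, a short sketch with PureWindowsPath:

    from pathlib import PureWindowsPath as P

    p = P('C:/a/b')
    p / 'x/y'               # PureWindowsPath('C:/a/b/x/y'), relative -> appended
    p / 'c:x/y'             # PureWindowsPath('C:/a/b/x/y'), same drive, no root -> appended
    p / '/x/y'              # PureWindowsPath('C:/x/y'), rooted -> drive kept, path replaced
    p / 'D:x/y'             # PureWindowsPath('D:x/y'), other drive -> first path ignored
    p / '//host/share/x/y'  # PureWindowsPath('//host/share/x/y')
    p / P('./d:s')          # PureWindowsPath('C:/a/b/d:s'), NTFS stream name, not a drive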
+# + +class PathTest(test_pathlib_abc.DummyPathTest, PurePathTest): + """Tests for the FS-accessing functionalities of the Path classes.""" + cls = pathlib.Path + can_symlink = os_helper.can_symlink() + + def setUp(self): + super().setUp() + os.chmod(self.pathmod.join(self.base, 'dirE'), 0) + + def tearDown(self): + os.chmod(self.pathmod.join(self.base, 'dirE'), 0o777) + os_helper.rmtree(self.base) + + def tempdir(self): + d = os_helper._longpath(tempfile.mkdtemp(suffix='-dirD', + dir=os.getcwd())) + self.addCleanup(os_helper.rmtree, d) + return d + + def test_concrete_class(self): + if self.cls is pathlib.Path: + expected = pathlib.WindowsPath if os.name == 'nt' else pathlib.PosixPath + else: + expected = self.cls + p = self.cls('a') + self.assertIs(type(p), expected) + + def test_unsupported_pathmod(self): + if self.cls.pathmod is os.path: + self.skipTest("path flavour is supported") + else: + self.assertRaises(pathlib.UnsupportedOperation, self.cls) + + def _test_cwd(self, p): + q = self.cls(os.getcwd()) + self.assertEqual(p, q) + self.assertEqualNormCase(str(p), str(q)) + self.assertIs(type(p), type(q)) + self.assertTrue(p.is_absolute()) + + def test_cwd(self): + p = self.cls.cwd() + self._test_cwd(p) + + def test_absolute_common(self): + P = self.cls + + with mock.patch("os.getcwd") as getcwd: + getcwd.return_value = self.base + + # Simple relative paths. + self.assertEqual(str(P().absolute()), self.base) + self.assertEqual(str(P('.').absolute()), self.base) + self.assertEqual(str(P('a').absolute()), os.path.join(self.base, 'a')) + self.assertEqual(str(P('a', 'b', 'c').absolute()), os.path.join(self.base, 'a', 'b', 'c')) + + # Symlinks should not be resolved. + self.assertEqual(str(P('linkB', 'fileB').absolute()), os.path.join(self.base, 'linkB', 'fileB')) + self.assertEqual(str(P('brokenLink').absolute()), os.path.join(self.base, 'brokenLink')) + self.assertEqual(str(P('brokenLinkLoop').absolute()), os.path.join(self.base, 'brokenLinkLoop')) + + # '..' entries should be preserved and not normalised. + self.assertEqual(str(P('..').absolute()), os.path.join(self.base, '..')) + self.assertEqual(str(P('a', '..').absolute()), os.path.join(self.base, 'a', '..')) + self.assertEqual(str(P('..', 'b').absolute()), os.path.join(self.base, '..', 'b')) + + def _test_home(self, p): + q = self.cls(os.path.expanduser('~')) + self.assertEqual(p, q) + self.assertEqualNormCase(str(p), str(q)) + self.assertIs(type(p), type(q)) + self.assertTrue(p.is_absolute()) + + @unittest.skipIf( + pwd is None, reason="Test requires pwd module to get homedir." 
+ ) + def test_home(self): + with os_helper.EnvironmentVarGuard() as env: + self._test_home(self.cls.home()) + + env.clear() + env['USERPROFILE'] = os.path.join(self.base, 'userprofile') + self._test_home(self.cls.home()) + + # bpo-38883: ignore `HOME` when set on windows + env['HOME'] = os.path.join(self.base, 'home') + self._test_home(self.cls.home()) + + @unittest.skipIf(is_wasi, "WASI has no user accounts.") + def test_expanduser_common(self): + P = self.cls + p = P('~') + self.assertEqual(p.expanduser(), P(os.path.expanduser('~'))) + p = P('foo') + self.assertEqual(p.expanduser(), p) + p = P('/~') + self.assertEqual(p.expanduser(), p) + p = P('../~') + self.assertEqual(p.expanduser(), p) + p = P(P('').absolute().anchor) / '~' + self.assertEqual(p.expanduser(), p) + p = P('~/a:b') + self.assertEqual(p.expanduser(), P(os.path.expanduser('~'), './a:b')) + + def test_with_segments(self): + class P(self.cls): + def __init__(self, *pathsegments, session_id): + super().__init__(*pathsegments) + self.session_id = session_id + + def with_segments(self, *pathsegments): + return type(self)(*pathsegments, session_id=self.session_id) + p = P(self.base, session_id=42) + self.assertEqual(42, p.absolute().session_id) + self.assertEqual(42, p.resolve().session_id) + if not is_wasi: # WASI has no user accounts. + self.assertEqual(42, p.with_segments('~').expanduser().session_id) + self.assertEqual(42, (p / 'fileA').rename(p / 'fileB').session_id) + self.assertEqual(42, (p / 'fileB').replace(p / 'fileA').session_id) + if self.can_symlink: + self.assertEqual(42, (p / 'linkA').readlink().session_id) + for path in p.iterdir(): + self.assertEqual(42, path.session_id) + for path in p.glob('*'): + self.assertEqual(42, path.session_id) + for path in p.rglob('*'): + self.assertEqual(42, path.session_id) + for dirpath, dirnames, filenames in p.walk(): + self.assertEqual(42, dirpath.session_id) + + def test_open_unbuffered(self): + p = self.cls(self.base) + with (p / 'fileA').open('rb', buffering=0) as f: + self.assertIsInstance(f, io.RawIOBase) + self.assertEqual(f.read().strip(), b"this is file A") + + def test_resolve_nonexist_relative_issue38671(self): + p = self.cls('non', 'exist') + + old_cwd = os.getcwd() + os.chdir(self.base) + try: + self.assertEqual(p.resolve(), self.cls(self.base, p)) + finally: + os.chdir(old_cwd) + + @os_helper.skip_unless_working_chmod + def test_chmod(self): + p = self.cls(self.base) / 'fileA' + mode = p.stat().st_mode + # Clear writable bit. + new_mode = mode & ~0o222 + p.chmod(new_mode) + self.assertEqual(p.stat().st_mode, new_mode) + # Set writable bit. + new_mode = mode | 0o222 + p.chmod(new_mode) + self.assertEqual(p.stat().st_mode, new_mode) + + # On Windows, os.chmod does not follow symlinks (issue #15411) + @only_posix + @os_helper.skip_unless_working_chmod + def test_chmod_follow_symlinks_true(self): + p = self.cls(self.base) / 'linkA' + q = p.resolve() + mode = q.stat().st_mode + # Clear writable bit. + new_mode = mode & ~0o222 + p.chmod(new_mode, follow_symlinks=True) + self.assertEqual(q.stat().st_mode, new_mode) + # Set writable bit + new_mode = mode | 0o222 + p.chmod(new_mode, follow_symlinks=True) + self.assertEqual(q.stat().st_mode, new_mode) + + # XXX also need a test for lchmod. 
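The with_segments() hook used in test_with_segments() above is the supported way (since Python 3.12) to carry subclass state onto derived paths. A minimal sketch; TaggedPath and tag are illustrative names, not part of the test suite:

    import pathlib

    class TaggedPath(pathlib.Path):
        def __init__(self, *segments, tag=None):
            super().__init__(*segments)
            self.tag = tag

        def with_segments(self, *segments):
            # Called whenever a derived path is created (joining, parent,
            # absolute(), glob() results, ...), so the tag is propagated.
            return type(self)(*segments, tag=self.tag)

    p = TaggedPath('/tmp', tag=42)
    (p / 'child').tag  # 42
    p.absolute().tag   # 42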
+ + def _get_pw_name_or_skip_test(self, uid): + try: + return pwd.getpwuid(uid).pw_name + except KeyError: + self.skipTest( + "user %d doesn't have an entry in the system database" % uid) + + @unittest.skipUnless(pwd, "the pwd module is needed for this test") + def test_owner(self): + p = self.cls(self.base) / 'fileA' + expected_uid = p.stat().st_uid + expected_name = self._get_pw_name_or_skip_test(expected_uid) + + self.assertEqual(expected_name, p.owner()) + + @unittest.skipUnless(pwd, "the pwd module is needed for this test") + @unittest.skipUnless(root_in_posix, "test needs root privilege") + def test_owner_no_follow_symlinks(self): + all_users = [u.pw_uid for u in pwd.getpwall()] + if len(all_users) < 2: + self.skipTest("test needs more than one user") + + target = self.cls(self.base) / 'fileA' + link = self.cls(self.base) / 'linkA' + + uid_1, uid_2 = all_users[:2] + os.chown(target, uid_1, -1) + os.chown(link, uid_2, -1, follow_symlinks=False) + + expected_uid = link.stat(follow_symlinks=False).st_uid + expected_name = self._get_pw_name_or_skip_test(expected_uid) + + self.assertEqual(expected_uid, uid_2) + self.assertEqual(expected_name, link.owner(follow_symlinks=False)) + + def _get_gr_name_or_skip_test(self, gid): + try: + return grp.getgrgid(gid).gr_name + except KeyError: + self.skipTest( + "group %d doesn't have an entry in the system database" % gid) + + @unittest.skipUnless(grp, "the grp module is needed for this test") + def test_group(self): + p = self.cls(self.base) / 'fileA' + expected_gid = p.stat().st_gid + expected_name = self._get_gr_name_or_skip_test(expected_gid) + + self.assertEqual(expected_name, p.group()) + + @unittest.skipUnless(grp, "the grp module is needed for this test") + @unittest.skipUnless(root_in_posix, "test needs root privilege") + def test_group_no_follow_symlinks(self): + all_groups = [g.gr_gid for g in grp.getgrall()] + if len(all_groups) < 2: + self.skipTest("test needs more than one group") + + target = self.cls(self.base) / 'fileA' + link = self.cls(self.base) / 'linkA' + + gid_1, gid_2 = all_groups[:2] + os.chown(target, -1, gid_1) + os.chown(link, -1, gid_2, follow_symlinks=False) + + expected_gid = link.stat(follow_symlinks=False).st_gid + expected_name = self._get_pw_name_or_skip_test(expected_gid) + + self.assertEqual(expected_gid, gid_2) + self.assertEqual(expected_name, link.group(follow_symlinks=False)) + + def test_unlink(self): + p = self.cls(self.base) / 'fileA' + p.unlink() + self.assertFileNotFound(p.stat) + self.assertFileNotFound(p.unlink) + + def test_unlink_missing_ok(self): + p = self.cls(self.base) / 'fileAAA' + self.assertFileNotFound(p.unlink) + p.unlink(missing_ok=True) + + def test_rmdir(self): + p = self.cls(self.base) / 'dirA' + for q in p.iterdir(): + q.unlink() + p.rmdir() + self.assertFileNotFound(p.stat) + self.assertFileNotFound(p.unlink) + + @unittest.skipUnless(hasattr(os, "link"), "os.link() is not present") + def test_hardlink_to(self): + P = self.cls(self.base) + target = P / 'fileA' + size = target.stat().st_size + # linking to another path. + link = P / 'dirA' / 'fileAA' + link.hardlink_to(target) + self.assertEqual(link.stat().st_size, size) + self.assertTrue(os.path.samefile(target, link)) + self.assertTrue(target.exists()) + # Linking to a str of a relative path. 
+ link2 = P / 'dirA' / 'fileAAA' + target2 = self.pathmod.join(TESTFN, 'fileA') + link2.hardlink_to(target2) + self.assertEqual(os.stat(target2).st_size, size) + self.assertTrue(link2.exists()) + + @unittest.skipIf(hasattr(os, "link"), "os.link() is present") + def test_hardlink_to_unsupported(self): + P = self.cls(self.base) + p = P / 'fileA' + # linking to another path. + q = P / 'dirA' / 'fileAA' + with self.assertRaises(pathlib.UnsupportedOperation): + q.hardlink_to(p) + + def test_rename(self): + P = self.cls(self.base) + p = P / 'fileA' + size = p.stat().st_size + # Renaming to another path. + q = P / 'dirA' / 'fileAA' + renamed_p = p.rename(q) + self.assertEqual(renamed_p, q) + self.assertEqual(q.stat().st_size, size) + self.assertFileNotFound(p.stat) + # Renaming to a str of a relative path. + r = self.pathmod.join(TESTFN, 'fileAAA') + renamed_q = q.rename(r) + self.assertEqual(renamed_q, self.cls(r)) + self.assertEqual(os.stat(r).st_size, size) + self.assertFileNotFound(q.stat) + + def test_replace(self): + P = self.cls(self.base) + p = P / 'fileA' + size = p.stat().st_size + # Replacing a non-existing path. + q = P / 'dirA' / 'fileAA' + replaced_p = p.replace(q) + self.assertEqual(replaced_p, q) + self.assertEqual(q.stat().st_size, size) + self.assertFileNotFound(p.stat) + # Replacing another (existing) path. + r = self.pathmod.join(TESTFN, 'dirB', 'fileB') + replaced_q = q.replace(r) + self.assertEqual(replaced_q, self.cls(r)) + self.assertEqual(os.stat(r).st_size, size) + self.assertFileNotFound(q.stat) + + def test_touch_common(self): + P = self.cls(self.base) + p = P / 'newfileA' + self.assertFalse(p.exists()) + p.touch() + self.assertTrue(p.exists()) + st = p.stat() + old_mtime = st.st_mtime + old_mtime_ns = st.st_mtime_ns + # Rewind the mtime sufficiently far in the past to work around + # filesystem-specific timestamp granularity. + os.utime(str(p), (old_mtime - 10, old_mtime - 10)) + # The file mtime should be refreshed by calling touch() again. + p.touch() + st = p.stat() + self.assertGreaterEqual(st.st_mtime_ns, old_mtime_ns) + self.assertGreaterEqual(st.st_mtime, old_mtime) + # Now with exist_ok=False. + p = P / 'newfileB' + self.assertFalse(p.exists()) + p.touch(mode=0o700, exist_ok=False) + self.assertTrue(p.exists()) + self.assertRaises(OSError, p.touch, exist_ok=False) + + def test_touch_nochange(self): + P = self.cls(self.base) + p = P / 'fileA' + p.touch() + with p.open('rb') as f: + self.assertEqual(f.read().strip(), b"this is file A") + + def test_mkdir(self): + P = self.cls(self.base) + p = P / 'newdirA' + self.assertFalse(p.exists()) + p.mkdir() + self.assertTrue(p.exists()) + self.assertTrue(p.is_dir()) + with self.assertRaises(OSError) as cm: + p.mkdir() + self.assertEqual(cm.exception.errno, errno.EEXIST) + + def test_mkdir_parents(self): + # Creating a chain of directories. + p = self.cls(self.base, 'newdirB', 'newdirC') + self.assertFalse(p.exists()) + with self.assertRaises(OSError) as cm: + p.mkdir() + self.assertEqual(cm.exception.errno, errno.ENOENT) + p.mkdir(parents=True) + self.assertTrue(p.exists()) + self.assertTrue(p.is_dir()) + with self.assertRaises(OSError) as cm: + p.mkdir(parents=True) + self.assertEqual(cm.exception.errno, errno.EEXIST) + # Test `mode` arg. + mode = stat.S_IMODE(p.stat().st_mode) # Default mode. + p = self.cls(self.base, 'newdirD', 'newdirE') + p.mkdir(0o555, parents=True) + self.assertTrue(p.exists()) + self.assertTrue(p.is_dir()) + if os.name != 'nt': + # The directory's permissions follow the mode argument. 
+ self.assertEqual(stat.S_IMODE(p.stat().st_mode), 0o7555 & mode) + # The parent's permissions follow the default process settings. + self.assertEqual(stat.S_IMODE(p.parent.stat().st_mode), mode) + + def test_mkdir_exist_ok(self): + p = self.cls(self.base, 'dirB') + st_ctime_first = p.stat().st_ctime + self.assertTrue(p.exists()) + self.assertTrue(p.is_dir()) + with self.assertRaises(FileExistsError) as cm: + p.mkdir() + self.assertEqual(cm.exception.errno, errno.EEXIST) + p.mkdir(exist_ok=True) + self.assertTrue(p.exists()) + self.assertEqual(p.stat().st_ctime, st_ctime_first) + + def test_mkdir_exist_ok_with_parent(self): + p = self.cls(self.base, 'dirC') + self.assertTrue(p.exists()) + with self.assertRaises(FileExistsError) as cm: + p.mkdir() + self.assertEqual(cm.exception.errno, errno.EEXIST) + p = p / 'newdirC' + p.mkdir(parents=True) + st_ctime_first = p.stat().st_ctime + self.assertTrue(p.exists()) + with self.assertRaises(FileExistsError) as cm: + p.mkdir(parents=True) + self.assertEqual(cm.exception.errno, errno.EEXIST) + p.mkdir(parents=True, exist_ok=True) + self.assertTrue(p.exists()) + self.assertEqual(p.stat().st_ctime, st_ctime_first) + + @unittest.skipIf(is_emscripten, "FS root cannot be modified on Emscripten.") + def test_mkdir_exist_ok_root(self): + # Issue #25803: A drive root could raise PermissionError on Windows. + self.cls('/').resolve().mkdir(exist_ok=True) + self.cls('/').resolve().mkdir(parents=True, exist_ok=True) + + @only_nt # XXX: not sure how to test this on POSIX. + def test_mkdir_with_unknown_drive(self): + for d in 'ZYXWVUTSRQPONMLKJIHGFEDCBA': + p = self.cls(d + ':\\') + if not p.is_dir(): + break + else: + self.skipTest("cannot find a drive that doesn't exist") + with self.assertRaises(OSError): + (p / 'child' / 'path').mkdir(parents=True) + + def test_mkdir_with_child_file(self): + p = self.cls(self.base, 'dirB', 'fileB') + self.assertTrue(p.exists()) + # An exception is raised when the last path component is an existing + # regular file, regardless of whether exist_ok is true or not. + with self.assertRaises(FileExistsError) as cm: + p.mkdir(parents=True) + self.assertEqual(cm.exception.errno, errno.EEXIST) + with self.assertRaises(FileExistsError) as cm: + p.mkdir(parents=True, exist_ok=True) + self.assertEqual(cm.exception.errno, errno.EEXIST) + + def test_mkdir_no_parents_file(self): + p = self.cls(self.base, 'fileA') + self.assertTrue(p.exists()) + # An exception is raised when the last path component is an existing + # regular file, regardless of whether exist_ok is true or not. + with self.assertRaises(FileExistsError) as cm: + p.mkdir() + self.assertEqual(cm.exception.errno, errno.EEXIST) + with self.assertRaises(FileExistsError) as cm: + p.mkdir(exist_ok=True) + self.assertEqual(cm.exception.errno, errno.EEXIST) + + def test_mkdir_concurrent_parent_creation(self): + for pattern_num in range(32): + p = self.cls(self.base, 'dirCPC%d' % pattern_num) + self.assertFalse(p.exists()) + + real_mkdir = os.mkdir + def my_mkdir(path, mode=0o777): + path = str(path) + # Emulate another process that would create the directory + # just before we try to create it ourselves. We do it + # in all possible pattern combinations, assuming that this + # function is called at most 5 times (dirCPC/dir1/dir2, + # dirCPC/dir1, dirCPC, dirCPC/dir1, dirCPC/dir1/dir2). + if pattern.pop(): + real_mkdir(path, mode) # From another process. + concurrently_created.add(path) + real_mkdir(path, mode) # Our real call. 
+ + pattern = [bool(pattern_num & (1 << n)) for n in range(5)] + concurrently_created = set() + p12 = p / 'dir1' / 'dir2' + try: + with mock.patch("os.mkdir", my_mkdir): + p12.mkdir(parents=True, exist_ok=False) + except FileExistsError: + self.assertIn(str(p12), concurrently_created) + else: + self.assertNotIn(str(p12), concurrently_created) + self.assertTrue(p.exists()) + + def test_symlink_to(self): + if not self.can_symlink: + self.skipTest("symlinks required") + P = self.cls(self.base) + target = P / 'fileA' + # Symlinking a path target. + link = P / 'dirA' / 'linkAA' + link.symlink_to(target) + self.assertEqual(link.stat(), target.stat()) + self.assertNotEqual(link.lstat(), target.stat()) + # Symlinking a str target. + link = P / 'dirA' / 'linkAAA' + link.symlink_to(str(target)) + self.assertEqual(link.stat(), target.stat()) + self.assertNotEqual(link.lstat(), target.stat()) + self.assertFalse(link.is_dir()) + # Symlinking to a directory. + target = P / 'dirB' + link = P / 'dirA' / 'linkAAAA' + link.symlink_to(target, target_is_directory=True) + self.assertEqual(link.stat(), target.stat()) + self.assertNotEqual(link.lstat(), target.stat()) + self.assertTrue(link.is_dir()) + self.assertTrue(list(link.iterdir())) + + @unittest.skipIf(hasattr(os, "symlink"), "os.symlink() is present") + def test_symlink_to_unsupported(self): + P = self.cls(self.base) + p = P / 'fileA' + # linking to another path. + q = P / 'dirA' / 'fileAA' + with self.assertRaises(pathlib.UnsupportedOperation): + q.symlink_to(p) + + def test_is_junction(self): + P = self.cls(self.base) + + with mock.patch.object(P.pathmod, 'isjunction'): + self.assertEqual(P.is_junction(), P.pathmod.isjunction.return_value) + P.pathmod.isjunction.assert_called_once_with(P) + + @unittest.skipUnless(hasattr(os, "mkfifo"), "os.mkfifo() required") + @unittest.skipIf(sys.platform == "vxworks", + "fifo requires special path on VxWorks") + def test_is_fifo_true(self): + P = self.cls(self.base, 'myfifo') + try: + os.mkfifo(str(P)) + except PermissionError as e: + self.skipTest('os.mkfifo(): %s' % e) + self.assertTrue(P.is_fifo()) + self.assertFalse(P.is_socket()) + self.assertFalse(P.is_file()) + self.assertIs(self.cls(self.base, 'myfifo\udfff').is_fifo(), False) + self.assertIs(self.cls(self.base, 'myfifo\x00').is_fifo(), False) + + @unittest.skipUnless(hasattr(socket, "AF_UNIX"), "Unix sockets required") + @unittest.skipIf( + is_emscripten, "Unix sockets are not implemented on Emscripten." + ) + @unittest.skipIf( + is_wasi, "Cannot create socket on WASI." + ) + def test_is_socket_true(self): + P = self.cls(self.base, 'mysock') + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + self.addCleanup(sock.close) + try: + sock.bind(str(P)) + except OSError as e: + if (isinstance(e, PermissionError) or + "AF_UNIX path too long" in str(e)): + self.skipTest("cannot bind Unix socket: " + str(e)) + self.assertTrue(P.is_socket()) + self.assertFalse(P.is_fifo()) + self.assertFalse(P.is_file()) + self.assertIs(self.cls(self.base, 'mysock\udfff').is_socket(), False) + self.assertIs(self.cls(self.base, 'mysock\x00').is_socket(), False) + + def test_is_char_device_true(self): + # Under Unix, /dev/null should generally be a char device. 
+ P = self.cls('/dev/null') + if not P.exists(): + self.skipTest("/dev/null required") + self.assertTrue(P.is_char_device()) + self.assertFalse(P.is_block_device()) + self.assertFalse(P.is_file()) + self.assertIs(self.cls('/dev/null\udfff').is_char_device(), False) + self.assertIs(self.cls('/dev/null\x00').is_char_device(), False) + + def test_is_mount_root(self): + if os.name == 'nt': + R = self.cls('c:\\') + else: + R = self.cls('/') + self.assertTrue(R.is_mount()) + self.assertFalse((R / '\udfff').is_mount()) + + def test_passing_kwargs_deprecated(self): + with self.assertWarns(DeprecationWarning): + self.cls(foo="bar") + + def setUpWalk(self): + super().setUpWalk() + sub21_path= self.sub2_path / "SUB21" + tmp5_path = sub21_path / "tmp3" + broken_link3_path = self.sub2_path / "broken_link3" + + os.makedirs(sub21_path) + tmp5_path.write_text("I am tmp5, blame test_pathlib.") + if self.can_symlink: + os.symlink(tmp5_path, broken_link3_path) + self.sub2_tree[2].append('broken_link3') + self.sub2_tree[2].sort() + if not is_emscripten: + # Emscripten fails with inaccessible directories. + os.chmod(sub21_path, 0) + try: + os.listdir(sub21_path) + except PermissionError: + self.sub2_tree[1].append('SUB21') + else: + os.chmod(sub21_path, stat.S_IRWXU) + os.unlink(tmp5_path) + os.rmdir(sub21_path) + + def test_walk_bad_dir(self): + self.setUpWalk() + errors = [] + walk_it = self.walk_path.walk(on_error=errors.append) + root, dirs, files = next(walk_it) + self.assertEqual(errors, []) + dir1 = 'SUB1' + path1 = root / dir1 + path1new = (root / dir1).with_suffix(".new") + path1.rename(path1new) + try: + roots = [r for r, _, _ in walk_it] + self.assertTrue(errors) + self.assertNotIn(path1, roots) + self.assertNotIn(path1new, roots) + for dir2 in dirs: + if dir2 != dir1: + self.assertIn(root / dir2, roots) + finally: + path1new.rename(path1) + + def test_walk_many_open_files(self): + depth = 30 + base = self.cls(self.base, 'deep') + path = self.cls(base, *(['d']*depth)) + path.mkdir(parents=True) + + iters = [base.walk(top_down=False) for _ in range(100)] + for i in range(depth + 1): + expected = (path, ['d'] if i else [], []) + for it in iters: + self.assertEqual(next(it), expected) + path = path.parent + + iters = [base.walk(top_down=True) for _ in range(100)] + path = base + for i in range(depth + 1): + expected = (path, ['d'] if i < depth else [], []) + for it in iters: + self.assertEqual(next(it), expected) + path = path / 'd' + + +@only_posix +class PosixPathTest(PathTest, PurePosixPathTest): + cls = pathlib.PosixPath + + def test_absolute(self): + P = self.cls + self.assertEqual(str(P('/').absolute()), '/') + self.assertEqual(str(P('/a').absolute()), '/a') + self.assertEqual(str(P('/a/b').absolute()), '/a/b') + + # '//'-prefixed absolute path (supported by POSIX). + self.assertEqual(str(P('//').absolute()), '//') + self.assertEqual(str(P('//a').absolute()), '//a') + self.assertEqual(str(P('//a/b').absolute()), '//a/b') + + @unittest.skipIf( + is_emscripten or is_wasi, + "umask is not implemented on Emscripten/WASI." 
+ ) + def test_open_mode(self): + old_mask = os.umask(0) + self.addCleanup(os.umask, old_mask) + p = self.cls(self.base) + with (p / 'new_file').open('wb'): + pass + st = os.stat(self.pathmod.join(self.base, 'new_file')) + self.assertEqual(stat.S_IMODE(st.st_mode), 0o666) + os.umask(0o022) + with (p / 'other_new_file').open('wb'): + pass + st = os.stat(self.pathmod.join(self.base, 'other_new_file')) + self.assertEqual(stat.S_IMODE(st.st_mode), 0o644) + + def test_resolve_root(self): + current_directory = os.getcwd() + try: + os.chdir('/') + p = self.cls('spam') + self.assertEqual(str(p.resolve()), '/spam') + finally: + os.chdir(current_directory) + + @unittest.skipIf( + is_emscripten or is_wasi, + "umask is not implemented on Emscripten/WASI." + ) + def test_touch_mode(self): + old_mask = os.umask(0) + self.addCleanup(os.umask, old_mask) + p = self.cls(self.base) + (p / 'new_file').touch() + st = os.stat(self.pathmod.join(self.base, 'new_file')) + self.assertEqual(stat.S_IMODE(st.st_mode), 0o666) + os.umask(0o022) + (p / 'other_new_file').touch() + st = os.stat(self.pathmod.join(self.base, 'other_new_file')) + self.assertEqual(stat.S_IMODE(st.st_mode), 0o644) + (p / 'masked_new_file').touch(mode=0o750) + st = os.stat(self.pathmod.join(self.base, 'masked_new_file')) + self.assertEqual(stat.S_IMODE(st.st_mode), 0o750) + + def test_glob(self): + P = self.cls + p = P(self.base) + given = set(p.glob("FILEa")) + expect = set() if not os_helper.fs_is_case_insensitive(self.base) else given + self.assertEqual(given, expect) + self.assertEqual(set(p.glob("FILEa*")), set()) + + def test_rglob(self): + P = self.cls + p = P(self.base, "dirC") + given = set(p.rglob("FILEd")) + expect = set() if not os_helper.fs_is_case_insensitive(self.base) else given + self.assertEqual(given, expect) + self.assertEqual(set(p.rglob("FILEd*")), set()) + + @unittest.skipUnless(hasattr(pwd, 'getpwall'), + 'pwd module does not expose getpwall()') + @unittest.skipIf(sys.platform == "vxworks", + "no home directory on VxWorks") + def test_expanduser(self): + P = self.cls + import_helper.import_module('pwd') + import pwd + pwdent = pwd.getpwuid(os.getuid()) + username = pwdent.pw_name + userhome = pwdent.pw_dir.rstrip('/') or '/' + # Find arbitrary different user (if exists). + for pwdent in pwd.getpwall(): + othername = pwdent.pw_name + otherhome = pwdent.pw_dir.rstrip('/') + if othername != username and otherhome: + break + else: + othername = username + otherhome = userhome + + fakename = 'fakeuser' + # This user can theoretically exist on a test runner. 
+        # Create unique name:
+        try:
+            while pwd.getpwnam(fakename):
+                fakename += '1'
+        except KeyError:
+            pass  # Non-existent name found
+
+        p1 = P('~/Documents')
+        p2 = P(f'~{username}/Documents')
+        p3 = P(f'~{othername}/Documents')
+        p4 = P(f'../~{username}/Documents')
+        p5 = P(f'/~{username}/Documents')
+        p6 = P('')
+        p7 = P(f'~{fakename}/Documents')
+
+        with os_helper.EnvironmentVarGuard() as env:
+            env.pop('HOME', None)
+
+            self.assertEqual(p1.expanduser(), P(userhome) / 'Documents')
+            self.assertEqual(p2.expanduser(), P(userhome) / 'Documents')
+            self.assertEqual(p3.expanduser(), P(otherhome) / 'Documents')
+            self.assertEqual(p4.expanduser(), p4)
+            self.assertEqual(p5.expanduser(), p5)
+            self.assertEqual(p6.expanduser(), p6)
+            self.assertRaises(RuntimeError, p7.expanduser)
+
+            env['HOME'] = '/tmp'
+            self.assertEqual(p1.expanduser(), P('/tmp/Documents'))
+            self.assertEqual(p2.expanduser(), P(userhome) / 'Documents')
+            self.assertEqual(p3.expanduser(), P(otherhome) / 'Documents')
+            self.assertEqual(p4.expanduser(), p4)
+            self.assertEqual(p5.expanduser(), p5)
+            self.assertEqual(p6.expanduser(), p6)
+            self.assertRaises(RuntimeError, p7.expanduser)
+
+    @unittest.skipIf(sys.platform != "darwin",
+                     "Bad file descriptor in /dev/fd affects only macOS")
+    def test_handling_bad_descriptor(self):
+        try:
+            file_descriptors = list(pathlib.Path('/dev/fd').rglob("*"))[3:]
+            if not file_descriptors:
+                self.skipTest("no file descriptors - issue was not reproduced")
+            # Checking all file descriptors because there is no guarantee
+            # which one will fail.
+            for f in file_descriptors:
+                f.exists()
+                f.is_dir()
+                f.is_file()
+                f.is_symlink()
+                f.is_block_device()
+                f.is_char_device()
+                f.is_fifo()
+                f.is_socket()
+        except OSError as e:
+            if e.errno == errno.EBADF:
+                self.fail("Bad file descriptor not handled.")
+            raise
+
+    def test_from_uri(self):
+        P = self.cls
+        self.assertEqual(P.from_uri('file:/foo/bar'), P('/foo/bar'))
+        self.assertEqual(P.from_uri('file://foo/bar'), P('//foo/bar'))
+        self.assertEqual(P.from_uri('file:///foo/bar'), P('/foo/bar'))
+        self.assertEqual(P.from_uri('file:////foo/bar'), P('//foo/bar'))
+        self.assertEqual(P.from_uri('file://localhost/foo/bar'), P('/foo/bar'))
+        self.assertRaises(ValueError, P.from_uri, 'foo/bar')
+        self.assertRaises(ValueError, P.from_uri, '/foo/bar')
+        self.assertRaises(ValueError, P.from_uri, '//foo/bar')
+        self.assertRaises(ValueError, P.from_uri, 'file:foo/bar')
+        self.assertRaises(ValueError, P.from_uri, 'http://foo/bar')
+
+    def test_from_uri_pathname2url(self):
+        P = self.cls
+        self.assertEqual(P.from_uri('file:' + pathname2url('/foo/bar')), P('/foo/bar'))
+        self.assertEqual(P.from_uri('file:' + pathname2url('//foo/bar')), P('//foo/bar'))
+
+
+@only_nt
+class WindowsPathTest(PathTest, PureWindowsPathTest):
+    cls = pathlib.WindowsPath
+
+    def test_absolute(self):
+        P = self.cls
+
+        # Simple absolute paths.
+        self.assertEqual(str(P('c:\\').absolute()), 'c:\\')
+        self.assertEqual(str(P('c:\\a').absolute()), 'c:\\a')
+        self.assertEqual(str(P('c:\\a\\b').absolute()), 'c:\\a\\b')
+
+        # UNC absolute paths.
+        share = '\\\\server\\share\\'
+        self.assertEqual(str(P(share).absolute()), share)
+        self.assertEqual(str(P(share + 'a').absolute()), share + 'a')
+        self.assertEqual(str(P(share + 'a\\b').absolute()), share + 'a\\b')
+
+        # UNC relative paths.
+ with mock.patch("os.getcwd") as getcwd: + getcwd.return_value = share + + self.assertEqual(str(P().absolute()), share) + self.assertEqual(str(P('.').absolute()), share) + self.assertEqual(str(P('a').absolute()), os.path.join(share, 'a')) + self.assertEqual(str(P('a', 'b', 'c').absolute()), + os.path.join(share, 'a', 'b', 'c')) + + drive = os.path.splitdrive(self.base)[0] + with os_helper.change_cwd(self.base): + # Relative path with root + self.assertEqual(str(P('\\').absolute()), drive + '\\') + self.assertEqual(str(P('\\foo').absolute()), drive + '\\foo') + + # Relative path on current drive + self.assertEqual(str(P(drive).absolute()), self.base) + self.assertEqual(str(P(drive + 'foo').absolute()), os.path.join(self.base, 'foo')) + + with os_helper.subst_drive(self.base) as other_drive: + # Set the working directory on the substitute drive + saved_cwd = os.getcwd() + other_cwd = f'{other_drive}\\dirA' + os.chdir(other_cwd) + os.chdir(saved_cwd) + + # Relative path on another drive + self.assertEqual(str(P(other_drive).absolute()), other_cwd) + self.assertEqual(str(P(other_drive + 'foo').absolute()), other_cwd + '\\foo') + + def test_glob(self): + P = self.cls + p = P(self.base) + self.assertEqual(set(p.glob("FILEa")), { P(self.base, "fileA") }) + self.assertEqual(set(p.glob("*a\\")), { P(self.base, "dirA/") }) + self.assertEqual(set(p.glob("F*a")), { P(self.base, "fileA") }) + self.assertEqual(set(map(str, p.glob("FILEa"))), {f"{p}\\fileA"}) + self.assertEqual(set(map(str, p.glob("F*a"))), {f"{p}\\fileA"}) + + def test_rglob(self): + P = self.cls + p = P(self.base, "dirC") + self.assertEqual(set(p.rglob("FILEd")), { P(self.base, "dirC/dirD/fileD") }) + self.assertEqual(set(p.rglob("*\\")), { P(self.base, "dirC/dirD/") }) + self.assertEqual(set(map(str, p.rglob("FILEd"))), {f"{p}\\dirD\\fileD"}) + + def test_expanduser(self): + P = self.cls + with os_helper.EnvironmentVarGuard() as env: + env.pop('HOME', None) + env.pop('USERPROFILE', None) + env.pop('HOMEPATH', None) + env.pop('HOMEDRIVE', None) + env['USERNAME'] = 'alice' + + # test that the path returns unchanged + p1 = P('~/My Documents') + p2 = P('~alice/My Documents') + p3 = P('~bob/My Documents') + p4 = P('/~/My Documents') + p5 = P('d:~/My Documents') + p6 = P('') + self.assertRaises(RuntimeError, p1.expanduser) + self.assertRaises(RuntimeError, p2.expanduser) + self.assertRaises(RuntimeError, p3.expanduser) + self.assertEqual(p4.expanduser(), p4) + self.assertEqual(p5.expanduser(), p5) + self.assertEqual(p6.expanduser(), p6) + + def check(): + env.pop('USERNAME', None) + self.assertEqual(p1.expanduser(), + P('C:/Users/alice/My Documents')) + self.assertRaises(RuntimeError, p2.expanduser) + env['USERNAME'] = 'alice' + self.assertEqual(p2.expanduser(), + P('C:/Users/alice/My Documents')) + self.assertEqual(p3.expanduser(), + P('C:/Users/bob/My Documents')) + self.assertEqual(p4.expanduser(), p4) + self.assertEqual(p5.expanduser(), p5) + self.assertEqual(p6.expanduser(), p6) + + env['HOMEPATH'] = 'C:\\Users\\alice' + check() + + env['HOMEDRIVE'] = 'C:\\' + env['HOMEPATH'] = 'Users\\alice' + check() + + env.pop('HOMEDRIVE', None) + env.pop('HOMEPATH', None) + env['USERPROFILE'] = 'C:\\Users\\alice' + check() + + # bpo-38883: ignore `HOME` when set on windows + env['HOME'] = 'C:\\Users\\eve' + check() + + def test_from_uri(self): + P = self.cls + # DOS drive paths + self.assertEqual(P.from_uri('file:c:/path/to/file'), P('c:/path/to/file')) + self.assertEqual(P.from_uri('file:c|/path/to/file'), P('c:/path/to/file')) + 
self.assertEqual(P.from_uri('file:/c|/path/to/file'), P('c:/path/to/file')) + self.assertEqual(P.from_uri('file:///c|/path/to/file'), P('c:/path/to/file')) + # UNC paths + self.assertEqual(P.from_uri('file://server/path/to/file'), P('//server/path/to/file')) + self.assertEqual(P.from_uri('file:////server/path/to/file'), P('//server/path/to/file')) + self.assertEqual(P.from_uri('file://///server/path/to/file'), P('//server/path/to/file')) + # Localhost paths + self.assertEqual(P.from_uri('file://localhost/c:/path/to/file'), P('c:/path/to/file')) + self.assertEqual(P.from_uri('file://localhost/c|/path/to/file'), P('c:/path/to/file')) + # Invalid paths + self.assertRaises(ValueError, P.from_uri, 'foo/bar') + self.assertRaises(ValueError, P.from_uri, 'c:/foo/bar') + self.assertRaises(ValueError, P.from_uri, '//foo/bar') + self.assertRaises(ValueError, P.from_uri, 'file:foo/bar') + self.assertRaises(ValueError, P.from_uri, 'http://foo/bar') + + def test_from_uri_pathname2url(self): + P = self.cls + self.assertEqual(P.from_uri('file:' + pathname2url(r'c:\path\to\file')), P('c:/path/to/file')) + self.assertEqual(P.from_uri('file:' + pathname2url(r'\\server\path\to\file')), P('//server/path/to/file')) + + def test_owner(self): + P = self.cls + with self.assertRaises(pathlib.UnsupportedOperation): + P('c:/').owner() + + def test_group(self): + P = self.cls + with self.assertRaises(pathlib.UnsupportedOperation): + P('c:/').group() + + +class PathSubclassTest(PathTest): + class cls(pathlib.Path): + pass + + # repr() roundtripping is not supported in custom subclass. + test_repr_roundtrips = None + + +class CompatiblePathTest(unittest.TestCase): + """ + Test that a type can be made compatible with PurePath + derivatives by implementing division operator overloads. + """ + + class CompatPath: + """ + Minimum viable class to test PurePath compatibility. + Simply uses the division operator to join a given + string and the string value of another object with + a forward slash. 
+ """ + def __init__(self, string): + self.string = string + + def __truediv__(self, other): + return type(self)(f"{self.string}/{other}") + + def __rtruediv__(self, other): + return type(self)(f"{other}/{self.string}") + + def test_truediv(self): + result = pathlib.PurePath("test") / self.CompatPath("right") + self.assertIsInstance(result, self.CompatPath) + self.assertEqual(result.string, "test/right") + + with self.assertRaises(TypeError): + # Verify improper operations still raise a TypeError + pathlib.PurePath("test") / 10 + + def test_rtruediv(self): + result = self.CompatPath("left") / pathlib.PurePath("test") + self.assertIsInstance(result, self.CompatPath) + self.assertEqual(result.string, "left/test") + + with self.assertRaises(TypeError): + # Verify improper operations still raise a TypeError + 10 / pathlib.PurePath("test") + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_pathlib/test_pathlib_abc.py b/Lib/test/test_pathlib/test_pathlib_abc.py new file mode 100644 index 00000000000000..a272973d9c1d61 --- /dev/null +++ b/Lib/test/test_pathlib/test_pathlib_abc.py @@ -0,0 +1,1911 @@ +import collections.abc +import io +import os +import errno +import pathlib +import pickle +import posixpath +import stat +import unittest + +from test.support import set_recursion_limit +from test.support.os_helper import TESTFN + + +class UnsupportedOperationTest(unittest.TestCase): + def test_is_notimplemented(self): + self.assertTrue(issubclass(pathlib.UnsupportedOperation, NotImplementedError)) + self.assertTrue(isinstance(pathlib.UnsupportedOperation(), NotImplementedError)) + + +# +# Tests for the pure classes. +# + + +class PurePathBaseTest(unittest.TestCase): + cls = pathlib._abc.PurePathBase + + def test_magic_methods(self): + P = self.cls + self.assertFalse(hasattr(P, '__fspath__')) + self.assertFalse(hasattr(P, '__bytes__')) + self.assertIs(P.__reduce__, object.__reduce__) + self.assertIs(P.__hash__, object.__hash__) + self.assertIs(P.__eq__, object.__eq__) + self.assertIs(P.__lt__, object.__lt__) + self.assertIs(P.__le__, object.__le__) + self.assertIs(P.__gt__, object.__gt__) + self.assertIs(P.__ge__, object.__ge__) + + +class DummyPurePath(pathlib._abc.PurePathBase): + def __eq__(self, other): + if not isinstance(other, DummyPurePath): + return NotImplemented + return str(self) == str(other) + + def __hash__(self): + return hash(str(self)) + + +class DummyPurePathTest(unittest.TestCase): + cls = DummyPurePath + + # Make sure any symbolic links in the base test path are resolved. + base = os.path.realpath(TESTFN) + + # Keys are canonical paths, values are list of tuples of arguments + # supposed to produce equal paths. + equivalences = { + 'a/b': [ + ('a', 'b'), ('a/', 'b'), ('a', 'b/'), ('a/', 'b/'), + ('a/b/',), ('a//b',), ('a//b//',), + # Empty components get removed. + ('', 'a', 'b'), ('a', '', 'b'), ('a', 'b', ''), + ], + '/b/c/d': [ + ('a', '/b/c', 'd'), ('/a', '/b/c', 'd'), + # Empty components get removed. 
+ ('/', 'b', '', 'c/d'), ('/', '', 'b/c/d'), ('', '/b/c/d'), + ], + } + + def setUp(self): + p = self.cls('a') + self.pathmod = p.pathmod + self.sep = self.pathmod.sep + self.altsep = self.pathmod.altsep + + def test_constructor_common(self): + P = self.cls + p = P('a') + self.assertIsInstance(p, P) + P('a', 'b', 'c') + P('/a', 'b', 'c') + P('a/b/c') + P('/a/b/c') + + def test_concrete_class(self): + if self.cls is pathlib.PurePath: + expected = pathlib.PureWindowsPath if os.name == 'nt' else pathlib.PurePosixPath + else: + expected = self.cls + p = self.cls('a') + self.assertIs(type(p), expected) + + def test_different_pathmods_unequal(self): + p = self.cls('a') + if p.pathmod is posixpath: + q = pathlib.PureWindowsPath('a') + else: + q = pathlib.PurePosixPath('a') + self.assertNotEqual(p, q) + + def test_different_pathmods_unordered(self): + p = self.cls('a') + if p.pathmod is posixpath: + q = pathlib.PureWindowsPath('a') + else: + q = pathlib.PurePosixPath('a') + with self.assertRaises(TypeError): + p < q + with self.assertRaises(TypeError): + p <= q + with self.assertRaises(TypeError): + p > q + with self.assertRaises(TypeError): + p >= q + + def _check_str_subclass(self, *args): + # Issue #21127: it should be possible to construct a PurePath object + # from a str subclass instance, and it then gets converted to + # a pure str object. + class StrSubclass(str): + pass + P = self.cls + p = P(*(StrSubclass(x) for x in args)) + self.assertEqual(p, P(*args)) + for part in p.parts: + self.assertIs(type(part), str) + + def test_str_subclass_common(self): + self._check_str_subclass('') + self._check_str_subclass('.') + self._check_str_subclass('a') + self._check_str_subclass('a/b.txt') + self._check_str_subclass('/a/b.txt') + + def test_with_segments_common(self): + class P(self.cls): + def __init__(self, *pathsegments, session_id): + super().__init__(*pathsegments) + self.session_id = session_id + + def with_segments(self, *pathsegments): + return type(self)(*pathsegments, session_id=self.session_id) + p = P('foo', 'bar', session_id=42) + self.assertEqual(42, (p / 'foo').session_id) + self.assertEqual(42, ('foo' / p).session_id) + self.assertEqual(42, p.joinpath('foo').session_id) + self.assertEqual(42, p.with_name('foo').session_id) + self.assertEqual(42, p.with_stem('foo').session_id) + self.assertEqual(42, p.with_suffix('.foo').session_id) + self.assertEqual(42, p.with_segments('foo').session_id) + self.assertEqual(42, p.relative_to('foo').session_id) + self.assertEqual(42, p.parent.session_id) + for parent in p.parents: + self.assertEqual(42, parent.session_id) + + def _check_parse_path(self, raw_path, *expected): + sep = self.pathmod.sep + actual = self.cls._parse_path(raw_path.replace('/', sep)) + self.assertEqual(actual, expected) + if altsep := self.pathmod.altsep: + actual = self.cls._parse_path(raw_path.replace('/', altsep)) + self.assertEqual(actual, expected) + + def test_parse_path_common(self): + check = self._check_parse_path + sep = self.pathmod.sep + check('', '', '', []) + check('a', '', '', ['a']) + check('a/', '', '', ['a']) + check('a/b', '', '', ['a', 'b']) + check('a/b/', '', '', ['a', 'b']) + check('a/b/c/d', '', '', ['a', 'b', 'c', 'd']) + check('a/b//c/d', '', '', ['a', 'b', 'c', 'd']) + check('a/b/c/d', '', '', ['a', 'b', 'c', 'd']) + check('.', '', '', []) + check('././b', '', '', ['b']) + check('a/./b', '', '', ['a', 'b']) + check('a/./.', '', '', ['a']) + check('/a/b', '', sep, ['a', 'b']) + + def test_join_common(self): + P = self.cls + p = P('a/b') + pp = 
p.joinpath('c') + self.assertEqual(pp, P('a/b/c')) + self.assertIs(type(pp), type(p)) + pp = p.joinpath('c', 'd') + self.assertEqual(pp, P('a/b/c/d')) + pp = p.joinpath('/c') + self.assertEqual(pp, P('/c')) + + def test_div_common(self): + # Basically the same as joinpath(). + P = self.cls + p = P('a/b') + pp = p / 'c' + self.assertEqual(pp, P('a/b/c')) + self.assertIs(type(pp), type(p)) + pp = p / 'c/d' + self.assertEqual(pp, P('a/b/c/d')) + pp = p / 'c' / 'd' + self.assertEqual(pp, P('a/b/c/d')) + pp = 'c' / p / 'd' + self.assertEqual(pp, P('c/a/b/d')) + pp = p/ '/c' + self.assertEqual(pp, P('/c')) + + def _check_str(self, expected, args): + p = self.cls(*args) + self.assertEqual(str(p), expected.replace('/', self.sep)) + + def test_str_common(self): + # Canonicalized paths roundtrip. + for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'): + self._check_str(pathstr, (pathstr,)) + # Special case for the empty path. + self._check_str('.', ('',)) + # Other tests for str() are in test_equivalences(). + + def test_as_posix_common(self): + P = self.cls + for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'): + self.assertEqual(P(pathstr).as_posix(), pathstr) + # Other tests for as_posix() are in test_equivalences(). + + def test_repr_common(self): + for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'): + with self.subTest(pathstr=pathstr): + p = self.cls(pathstr) + clsname = p.__class__.__name__ + r = repr(p) + # The repr() is in the form ClassName("forward-slashes path"). + self.assertTrue(r.startswith(clsname + '('), r) + self.assertTrue(r.endswith(')'), r) + inner = r[len(clsname) + 1 : -1] + self.assertEqual(eval(inner), p.as_posix()) + + def test_eq_common(self): + P = self.cls + self.assertEqual(P('a/b'), P('a/b')) + self.assertEqual(P('a/b'), P('a', 'b')) + self.assertNotEqual(P('a/b'), P('a')) + self.assertNotEqual(P('a/b'), P('/a/b')) + self.assertNotEqual(P('a/b'), P()) + self.assertNotEqual(P('/a/b'), P('/')) + self.assertNotEqual(P(), P('/')) + self.assertNotEqual(P(), "") + self.assertNotEqual(P(), {}) + self.assertNotEqual(P(), int) + + def test_match_common(self): + P = self.cls + self.assertRaises(ValueError, P('a').match, '') + self.assertRaises(ValueError, P('a').match, '.') + # Simple relative pattern. + self.assertTrue(P('b.py').match('b.py')) + self.assertTrue(P('a/b.py').match('b.py')) + self.assertTrue(P('/a/b.py').match('b.py')) + self.assertFalse(P('a.py').match('b.py')) + self.assertFalse(P('b/py').match('b.py')) + self.assertFalse(P('/a.py').match('b.py')) + self.assertFalse(P('b.py/c').match('b.py')) + # Wildcard relative pattern. + self.assertTrue(P('b.py').match('*.py')) + self.assertTrue(P('a/b.py').match('*.py')) + self.assertTrue(P('/a/b.py').match('*.py')) + self.assertFalse(P('b.pyc').match('*.py')) + self.assertFalse(P('b./py').match('*.py')) + self.assertFalse(P('b.py/c').match('*.py')) + # Multi-part relative pattern. + self.assertTrue(P('ab/c.py').match('a*/*.py')) + self.assertTrue(P('/d/ab/c.py').match('a*/*.py')) + self.assertFalse(P('a.py').match('a*/*.py')) + self.assertFalse(P('/dab/c.py').match('a*/*.py')) + self.assertFalse(P('ab/c.py/d').match('a*/*.py')) + # Absolute pattern. + self.assertTrue(P('/b.py').match('/*.py')) + self.assertFalse(P('b.py').match('/*.py')) + self.assertFalse(P('a/b.py').match('/*.py')) + self.assertFalse(P('/a/b.py').match('/*.py')) + # Multi-part absolute pattern. 
+ self.assertTrue(P('/a/b.py').match('/a/*.py')) + self.assertFalse(P('/ab.py').match('/a/*.py')) + self.assertFalse(P('/a/b/c.py').match('/a/*.py')) + # Multi-part glob-style pattern. + self.assertTrue(P('a').match('**')) + self.assertTrue(P('c.py').match('**')) + self.assertTrue(P('a/b/c.py').match('**')) + self.assertTrue(P('/a/b/c.py').match('**')) + self.assertTrue(P('/a/b/c.py').match('/**')) + self.assertTrue(P('/a/b/c.py').match('**/')) + self.assertTrue(P('/a/b/c.py').match('/a/**')) + self.assertTrue(P('/a/b/c.py').match('**/*.py')) + self.assertTrue(P('/a/b/c.py').match('/**/*.py')) + self.assertTrue(P('/a/b/c.py').match('/a/**/*.py')) + self.assertTrue(P('/a/b/c.py').match('/a/b/**/*.py')) + self.assertTrue(P('/a/b/c.py').match('/**/**/**/**/*.py')) + self.assertFalse(P('c.py').match('**/a.py')) + self.assertFalse(P('c.py').match('c/**')) + self.assertFalse(P('a/b/c.py').match('**/a')) + self.assertFalse(P('a/b/c.py').match('**/a/b')) + self.assertFalse(P('a/b/c.py').match('**/a/b/c')) + self.assertFalse(P('a/b/c.py').match('**/a/b/c.')) + self.assertFalse(P('a/b/c.py').match('**/a/b/c./**')) + self.assertFalse(P('a/b/c.py').match('**/a/b/c./**')) + self.assertFalse(P('a/b/c.py').match('/a/b/c.py/**')) + self.assertFalse(P('a/b/c.py').match('/**/a/b/c.py')) + self.assertRaises(ValueError, P('a').match, '**a/b/c') + self.assertRaises(ValueError, P('a').match, 'a/b/c**') + # Case-sensitive flag + self.assertFalse(P('A.py').match('a.PY', case_sensitive=True)) + self.assertTrue(P('A.py').match('a.PY', case_sensitive=False)) + self.assertFalse(P('c:/a/B.Py').match('C:/A/*.pY', case_sensitive=True)) + self.assertTrue(P('/a/b/c.py').match('/A/*/*.Py', case_sensitive=False)) + # Matching against empty path + self.assertFalse(P().match('*')) + self.assertTrue(P().match('**')) + self.assertFalse(P().match('**/*')) + + def test_parts_common(self): + # `parts` returns a tuple. + sep = self.sep + P = self.cls + p = P('a/b') + parts = p.parts + self.assertEqual(parts, ('a', 'b')) + # When the path is absolute, the anchor is a separate part. 
+ p = P('/a/b') + parts = p.parts + self.assertEqual(parts, (sep, 'a', 'b')) + + def test_equivalences(self): + for k, tuples in self.equivalences.items(): + canon = k.replace('/', self.sep) + posix = k.replace(self.sep, '/') + if canon != posix: + tuples = tuples + [ + tuple(part.replace('/', self.sep) for part in t) + for t in tuples + ] + tuples.append((posix, )) + pcanon = self.cls(canon) + for t in tuples: + p = self.cls(*t) + self.assertEqual(p, pcanon, "failed with args {}".format(t)) + self.assertEqual(hash(p), hash(pcanon)) + self.assertEqual(str(p), canon) + self.assertEqual(p.as_posix(), posix) + + def test_parent_common(self): + # Relative + P = self.cls + p = P('a/b/c') + self.assertEqual(p.parent, P('a/b')) + self.assertEqual(p.parent.parent, P('a')) + self.assertEqual(p.parent.parent.parent, P()) + self.assertEqual(p.parent.parent.parent.parent, P()) + # Anchored + p = P('/a/b/c') + self.assertEqual(p.parent, P('/a/b')) + self.assertEqual(p.parent.parent, P('/a')) + self.assertEqual(p.parent.parent.parent, P('/')) + self.assertEqual(p.parent.parent.parent.parent, P('/')) + + def test_parents_common(self): + # Relative + P = self.cls + p = P('a/b/c') + par = p.parents + self.assertEqual(len(par), 3) + self.assertEqual(par[0], P('a/b')) + self.assertEqual(par[1], P('a')) + self.assertEqual(par[2], P('.')) + self.assertEqual(par[-1], P('.')) + self.assertEqual(par[-2], P('a')) + self.assertEqual(par[-3], P('a/b')) + self.assertEqual(par[0:1], (P('a/b'),)) + self.assertEqual(par[:2], (P('a/b'), P('a'))) + self.assertEqual(par[:-1], (P('a/b'), P('a'))) + self.assertEqual(par[1:], (P('a'), P('.'))) + self.assertEqual(par[::2], (P('a/b'), P('.'))) + self.assertEqual(par[::-1], (P('.'), P('a'), P('a/b'))) + self.assertEqual(list(par), [P('a/b'), P('a'), P('.')]) + with self.assertRaises(IndexError): + par[-4] + with self.assertRaises(IndexError): + par[3] + with self.assertRaises(TypeError): + par[0] = p + # Anchored + p = P('/a/b/c') + par = p.parents + self.assertEqual(len(par), 3) + self.assertEqual(par[0], P('/a/b')) + self.assertEqual(par[1], P('/a')) + self.assertEqual(par[2], P('/')) + self.assertEqual(par[-1], P('/')) + self.assertEqual(par[-2], P('/a')) + self.assertEqual(par[-3], P('/a/b')) + self.assertEqual(par[0:1], (P('/a/b'),)) + self.assertEqual(par[:2], (P('/a/b'), P('/a'))) + self.assertEqual(par[:-1], (P('/a/b'), P('/a'))) + self.assertEqual(par[1:], (P('/a'), P('/'))) + self.assertEqual(par[::2], (P('/a/b'), P('/'))) + self.assertEqual(par[::-1], (P('/'), P('/a'), P('/a/b'))) + self.assertEqual(list(par), [P('/a/b'), P('/a'), P('/')]) + with self.assertRaises(IndexError): + par[-4] + with self.assertRaises(IndexError): + par[3] + + def test_drive_common(self): + P = self.cls + self.assertEqual(P('a/b').drive, '') + self.assertEqual(P('/a/b').drive, '') + self.assertEqual(P('').drive, '') + + def test_root_common(self): + P = self.cls + sep = self.sep + self.assertEqual(P('').root, '') + self.assertEqual(P('a/b').root, '') + self.assertEqual(P('/').root, sep) + self.assertEqual(P('/a/b').root, sep) + + def test_anchor_common(self): + P = self.cls + sep = self.sep + self.assertEqual(P('').anchor, '') + self.assertEqual(P('a/b').anchor, '') + self.assertEqual(P('/').anchor, sep) + self.assertEqual(P('/a/b').anchor, sep) + + def test_name_common(self): + P = self.cls + self.assertEqual(P('').name, '') + self.assertEqual(P('.').name, '') + self.assertEqual(P('/').name, '') + self.assertEqual(P('a/b').name, 'b') + self.assertEqual(P('/a/b').name, 'b') + 
self.assertEqual(P('/a/b/.').name, 'b') + self.assertEqual(P('a/b.py').name, 'b.py') + self.assertEqual(P('/a/b.py').name, 'b.py') + + def test_suffix_common(self): + P = self.cls + self.assertEqual(P('').suffix, '') + self.assertEqual(P('.').suffix, '') + self.assertEqual(P('..').suffix, '') + self.assertEqual(P('/').suffix, '') + self.assertEqual(P('a/b').suffix, '') + self.assertEqual(P('/a/b').suffix, '') + self.assertEqual(P('/a/b/.').suffix, '') + self.assertEqual(P('a/b.py').suffix, '.py') + self.assertEqual(P('/a/b.py').suffix, '.py') + self.assertEqual(P('a/.hgrc').suffix, '') + self.assertEqual(P('/a/.hgrc').suffix, '') + self.assertEqual(P('a/.hg.rc').suffix, '.rc') + self.assertEqual(P('/a/.hg.rc').suffix, '.rc') + self.assertEqual(P('a/b.tar.gz').suffix, '.gz') + self.assertEqual(P('/a/b.tar.gz').suffix, '.gz') + self.assertEqual(P('a/Some name. Ending with a dot.').suffix, '') + self.assertEqual(P('/a/Some name. Ending with a dot.').suffix, '') + + def test_suffixes_common(self): + P = self.cls + self.assertEqual(P('').suffixes, []) + self.assertEqual(P('.').suffixes, []) + self.assertEqual(P('/').suffixes, []) + self.assertEqual(P('a/b').suffixes, []) + self.assertEqual(P('/a/b').suffixes, []) + self.assertEqual(P('/a/b/.').suffixes, []) + self.assertEqual(P('a/b.py').suffixes, ['.py']) + self.assertEqual(P('/a/b.py').suffixes, ['.py']) + self.assertEqual(P('a/.hgrc').suffixes, []) + self.assertEqual(P('/a/.hgrc').suffixes, []) + self.assertEqual(P('a/.hg.rc').suffixes, ['.rc']) + self.assertEqual(P('/a/.hg.rc').suffixes, ['.rc']) + self.assertEqual(P('a/b.tar.gz').suffixes, ['.tar', '.gz']) + self.assertEqual(P('/a/b.tar.gz').suffixes, ['.tar', '.gz']) + self.assertEqual(P('a/Some name. Ending with a dot.').suffixes, []) + self.assertEqual(P('/a/Some name. Ending with a dot.').suffixes, []) + + def test_stem_common(self): + P = self.cls + self.assertEqual(P('').stem, '') + self.assertEqual(P('.').stem, '') + self.assertEqual(P('..').stem, '..') + self.assertEqual(P('/').stem, '') + self.assertEqual(P('a/b').stem, 'b') + self.assertEqual(P('a/b.py').stem, 'b') + self.assertEqual(P('a/.hgrc').stem, '.hgrc') + self.assertEqual(P('a/.hg.rc').stem, '.hg') + self.assertEqual(P('a/b.tar.gz').stem, 'b.tar') + self.assertEqual(P('a/Some name. Ending with a dot.').stem, + 'Some name. 
Ending with a dot.') + + def test_with_name_common(self): + P = self.cls + self.assertEqual(P('a/b').with_name('d.xml'), P('a/d.xml')) + self.assertEqual(P('/a/b').with_name('d.xml'), P('/a/d.xml')) + self.assertEqual(P('a/b.py').with_name('d.xml'), P('a/d.xml')) + self.assertEqual(P('/a/b.py').with_name('d.xml'), P('/a/d.xml')) + self.assertEqual(P('a/Dot ending.').with_name('d.xml'), P('a/d.xml')) + self.assertEqual(P('/a/Dot ending.').with_name('d.xml'), P('/a/d.xml')) + self.assertRaises(ValueError, P('').with_name, 'd.xml') + self.assertRaises(ValueError, P('.').with_name, 'd.xml') + self.assertRaises(ValueError, P('/').with_name, 'd.xml') + self.assertRaises(ValueError, P('a/b').with_name, '') + self.assertRaises(ValueError, P('a/b').with_name, '.') + self.assertRaises(ValueError, P('a/b').with_name, '/c') + self.assertRaises(ValueError, P('a/b').with_name, 'c/') + self.assertRaises(ValueError, P('a/b').with_name, 'c/d') + + def test_with_stem_common(self): + P = self.cls + self.assertEqual(P('a/b').with_stem('d'), P('a/d')) + self.assertEqual(P('/a/b').with_stem('d'), P('/a/d')) + self.assertEqual(P('a/b.py').with_stem('d'), P('a/d.py')) + self.assertEqual(P('/a/b.py').with_stem('d'), P('/a/d.py')) + self.assertEqual(P('/a/b.tar.gz').with_stem('d'), P('/a/d.gz')) + self.assertEqual(P('a/Dot ending.').with_stem('d'), P('a/d')) + self.assertEqual(P('/a/Dot ending.').with_stem('d'), P('/a/d')) + self.assertRaises(ValueError, P('').with_stem, 'd') + self.assertRaises(ValueError, P('.').with_stem, 'd') + self.assertRaises(ValueError, P('/').with_stem, 'd') + self.assertRaises(ValueError, P('a/b').with_stem, '') + self.assertRaises(ValueError, P('a/b').with_stem, '.') + self.assertRaises(ValueError, P('a/b').with_stem, '/c') + self.assertRaises(ValueError, P('a/b').with_stem, 'c/') + self.assertRaises(ValueError, P('a/b').with_stem, 'c/d') + + def test_with_suffix_common(self): + P = self.cls + self.assertEqual(P('a/b').with_suffix('.gz'), P('a/b.gz')) + self.assertEqual(P('/a/b').with_suffix('.gz'), P('/a/b.gz')) + self.assertEqual(P('a/b.py').with_suffix('.gz'), P('a/b.gz')) + self.assertEqual(P('/a/b.py').with_suffix('.gz'), P('/a/b.gz')) + # Stripping suffix. + self.assertEqual(P('a/b.py').with_suffix(''), P('a/b')) + self.assertEqual(P('/a/b').with_suffix(''), P('/a/b')) + # Path doesn't have a "filename" component. + self.assertRaises(ValueError, P('').with_suffix, '.gz') + self.assertRaises(ValueError, P('.').with_suffix, '.gz') + self.assertRaises(ValueError, P('/').with_suffix, '.gz') + # Invalid suffix. 
+ self.assertRaises(ValueError, P('a/b').with_suffix, 'gz') + self.assertRaises(ValueError, P('a/b').with_suffix, '/') + self.assertRaises(ValueError, P('a/b').with_suffix, '.') + self.assertRaises(ValueError, P('a/b').with_suffix, '/.gz') + self.assertRaises(ValueError, P('a/b').with_suffix, 'c/d') + self.assertRaises(ValueError, P('a/b').with_suffix, '.c/.d') + self.assertRaises(ValueError, P('a/b').with_suffix, './.d') + self.assertRaises(ValueError, P('a/b').with_suffix, '.d/.') + + def test_relative_to_common(self): + P = self.cls + p = P('a/b') + self.assertRaises(TypeError, p.relative_to) + self.assertRaises(TypeError, p.relative_to, b'a') + self.assertEqual(p.relative_to(P()), P('a/b')) + self.assertEqual(p.relative_to(''), P('a/b')) + self.assertEqual(p.relative_to(P('a')), P('b')) + self.assertEqual(p.relative_to('a'), P('b')) + self.assertEqual(p.relative_to('a/'), P('b')) + self.assertEqual(p.relative_to(P('a/b')), P()) + self.assertEqual(p.relative_to('a/b'), P()) + self.assertEqual(p.relative_to(P(), walk_up=True), P('a/b')) + self.assertEqual(p.relative_to('', walk_up=True), P('a/b')) + self.assertEqual(p.relative_to(P('a'), walk_up=True), P('b')) + self.assertEqual(p.relative_to('a', walk_up=True), P('b')) + self.assertEqual(p.relative_to('a/', walk_up=True), P('b')) + self.assertEqual(p.relative_to(P('a/b'), walk_up=True), P()) + self.assertEqual(p.relative_to('a/b', walk_up=True), P()) + self.assertEqual(p.relative_to(P('a/c'), walk_up=True), P('../b')) + self.assertEqual(p.relative_to('a/c', walk_up=True), P('../b')) + self.assertEqual(p.relative_to(P('a/b/c'), walk_up=True), P('..')) + self.assertEqual(p.relative_to('a/b/c', walk_up=True), P('..')) + self.assertEqual(p.relative_to(P('c'), walk_up=True), P('../a/b')) + self.assertEqual(p.relative_to('c', walk_up=True), P('../a/b')) + # With several args. + with self.assertWarns(DeprecationWarning): + p.relative_to('a', 'b') + p.relative_to('a', 'b', walk_up=True) + # Unrelated paths. 
+ self.assertRaises(ValueError, p.relative_to, P('c')) + self.assertRaises(ValueError, p.relative_to, P('a/b/c')) + self.assertRaises(ValueError, p.relative_to, P('a/c')) + self.assertRaises(ValueError, p.relative_to, P('/a')) + self.assertRaises(ValueError, p.relative_to, P("../a")) + self.assertRaises(ValueError, p.relative_to, P("a/..")) + self.assertRaises(ValueError, p.relative_to, P("/a/..")) + self.assertRaises(ValueError, p.relative_to, P('/'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('/a'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P("../a"), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P("a/.."), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P("/a/.."), walk_up=True) + p = P('/a/b') + self.assertEqual(p.relative_to(P('/')), P('a/b')) + self.assertEqual(p.relative_to('/'), P('a/b')) + self.assertEqual(p.relative_to(P('/a')), P('b')) + self.assertEqual(p.relative_to('/a'), P('b')) + self.assertEqual(p.relative_to('/a/'), P('b')) + self.assertEqual(p.relative_to(P('/a/b')), P()) + self.assertEqual(p.relative_to('/a/b'), P()) + self.assertEqual(p.relative_to(P('/'), walk_up=True), P('a/b')) + self.assertEqual(p.relative_to('/', walk_up=True), P('a/b')) + self.assertEqual(p.relative_to(P('/a'), walk_up=True), P('b')) + self.assertEqual(p.relative_to('/a', walk_up=True), P('b')) + self.assertEqual(p.relative_to('/a/', walk_up=True), P('b')) + self.assertEqual(p.relative_to(P('/a/b'), walk_up=True), P()) + self.assertEqual(p.relative_to('/a/b', walk_up=True), P()) + self.assertEqual(p.relative_to(P('/a/c'), walk_up=True), P('../b')) + self.assertEqual(p.relative_to('/a/c', walk_up=True), P('../b')) + self.assertEqual(p.relative_to(P('/a/b/c'), walk_up=True), P('..')) + self.assertEqual(p.relative_to('/a/b/c', walk_up=True), P('..')) + self.assertEqual(p.relative_to(P('/c'), walk_up=True), P('../a/b')) + self.assertEqual(p.relative_to('/c', walk_up=True), P('../a/b')) + # Unrelated paths. + self.assertRaises(ValueError, p.relative_to, P('/c')) + self.assertRaises(ValueError, p.relative_to, P('/a/b/c')) + self.assertRaises(ValueError, p.relative_to, P('/a/c')) + self.assertRaises(ValueError, p.relative_to, P()) + self.assertRaises(ValueError, p.relative_to, '') + self.assertRaises(ValueError, p.relative_to, P('a')) + self.assertRaises(ValueError, p.relative_to, P("../a")) + self.assertRaises(ValueError, p.relative_to, P("a/..")) + self.assertRaises(ValueError, p.relative_to, P("/a/..")) + self.assertRaises(ValueError, p.relative_to, P(''), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P('a'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P("../a"), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P("a/.."), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P("/a/.."), walk_up=True) + + def test_is_relative_to_common(self): + P = self.cls + p = P('a/b') + self.assertRaises(TypeError, p.is_relative_to) + self.assertRaises(TypeError, p.is_relative_to, b'a') + self.assertTrue(p.is_relative_to(P())) + self.assertTrue(p.is_relative_to('')) + self.assertTrue(p.is_relative_to(P('a'))) + self.assertTrue(p.is_relative_to('a/')) + self.assertTrue(p.is_relative_to(P('a/b'))) + self.assertTrue(p.is_relative_to('a/b')) + # With several args. + with self.assertWarns(DeprecationWarning): + p.is_relative_to('a', 'b') + # Unrelated paths. 
+ self.assertFalse(p.is_relative_to(P('c'))) + self.assertFalse(p.is_relative_to(P('a/b/c'))) + self.assertFalse(p.is_relative_to(P('a/c'))) + self.assertFalse(p.is_relative_to(P('/a'))) + p = P('/a/b') + self.assertTrue(p.is_relative_to(P('/'))) + self.assertTrue(p.is_relative_to('/')) + self.assertTrue(p.is_relative_to(P('/a'))) + self.assertTrue(p.is_relative_to('/a')) + self.assertTrue(p.is_relative_to('/a/')) + self.assertTrue(p.is_relative_to(P('/a/b'))) + self.assertTrue(p.is_relative_to('/a/b')) + # Unrelated paths. + self.assertFalse(p.is_relative_to(P('/c'))) + self.assertFalse(p.is_relative_to(P('/a/b/c'))) + self.assertFalse(p.is_relative_to(P('/a/c'))) + self.assertFalse(p.is_relative_to(P())) + self.assertFalse(p.is_relative_to('')) + self.assertFalse(p.is_relative_to(P('a'))) + + +# +# Tests for the virtual classes. +# + +class PathBaseTest(PurePathBaseTest): + cls = pathlib._abc.PathBase + + def test_unsupported_operation(self): + P = self.cls + p = self.cls() + e = pathlib.UnsupportedOperation + self.assertRaises(e, p.stat) + self.assertRaises(e, p.lstat) + self.assertRaises(e, p.exists) + self.assertRaises(e, p.samefile, 'foo') + self.assertRaises(e, p.is_dir) + self.assertRaises(e, p.is_file) + self.assertRaises(e, p.is_mount) + self.assertRaises(e, p.is_symlink) + self.assertRaises(e, p.is_block_device) + self.assertRaises(e, p.is_char_device) + self.assertRaises(e, p.is_fifo) + self.assertRaises(e, p.is_socket) + self.assertRaises(e, p.open) + self.assertRaises(e, p.read_bytes) + self.assertRaises(e, p.read_text) + self.assertRaises(e, p.write_bytes, b'foo') + self.assertRaises(e, p.write_text, 'foo') + self.assertRaises(e, p.iterdir) + self.assertRaises(e, p.glob, '*') + self.assertRaises(e, p.rglob, '*') + self.assertRaises(e, lambda: list(p.walk())) + self.assertRaises(e, p.absolute) + self.assertRaises(e, P.cwd) + self.assertRaises(e, p.expanduser) + self.assertRaises(e, p.home) + self.assertRaises(e, p.readlink) + self.assertRaises(e, p.symlink_to, 'foo') + self.assertRaises(e, p.hardlink_to, 'foo') + self.assertRaises(e, p.mkdir) + self.assertRaises(e, p.touch) + self.assertRaises(e, p.rename, 'foo') + self.assertRaises(e, p.replace, 'foo') + self.assertRaises(e, p.chmod, 0o755) + self.assertRaises(e, p.lchmod, 0o755) + self.assertRaises(e, p.unlink) + self.assertRaises(e, p.rmdir) + self.assertRaises(e, p.owner) + self.assertRaises(e, p.group) + self.assertRaises(e, p.as_uri) + + def test_as_uri_common(self): + e = pathlib.UnsupportedOperation + self.assertRaises(e, self.cls().as_uri) + + def test_fspath_common(self): + self.assertRaises(TypeError, os.fspath, self.cls()) + + def test_as_bytes_common(self): + self.assertRaises(TypeError, bytes, self.cls()) + + def test_matches_path_api(self): + our_names = {name for name in dir(self.cls) if name[0] != '_'} + path_names = {name for name in dir(pathlib.Path) if name[0] != '_'} + self.assertEqual(our_names, path_names) + for attr_name in our_names: + our_attr = getattr(self.cls, attr_name) + path_attr = getattr(pathlib.Path, attr_name) + self.assertEqual(our_attr.__doc__, path_attr.__doc__) + + +class DummyPathIO(io.BytesIO): + """ + Used by DummyPath to implement `open('w')` + """ + + def __init__(self, files, path): + super().__init__() + self.files = files + self.path = path + + def close(self): + self.files[self.path] = self.getvalue() + super().close() + + +class DummyPath(pathlib._abc.PathBase): + """ + Simple implementation of PathBase that keeps files and directories in + memory. 
+ """ + _files = {} + _directories = {} + _symlinks = {} + + def __eq__(self, other): + if not isinstance(other, DummyPath): + return NotImplemented + return str(self) == str(other) + + def __hash__(self): + return hash(str(self)) + + def stat(self, *, follow_symlinks=True): + if follow_symlinks: + path = str(self.resolve()) + else: + path = str(self.parent.resolve() / self.name) + if path in self._files: + st_mode = stat.S_IFREG + elif path in self._directories: + st_mode = stat.S_IFDIR + elif path in self._symlinks: + st_mode = stat.S_IFLNK + else: + raise FileNotFoundError(errno.ENOENT, "Not found", str(self)) + return os.stat_result((st_mode, hash(str(self)), 0, 0, 0, 0, 0, 0, 0, 0)) + + def open(self, mode='r', buffering=-1, encoding=None, + errors=None, newline=None): + if buffering != -1: + raise NotImplementedError + path_obj = self.resolve() + path = str(path_obj) + name = path_obj.name + parent = str(path_obj.parent) + if path in self._directories: + raise IsADirectoryError(errno.EISDIR, "Is a directory", path) + + text = 'b' not in mode + mode = ''.join(c for c in mode if c not in 'btU') + if mode == 'r': + if path not in self._files: + raise FileNotFoundError(errno.ENOENT, "File not found", path) + stream = io.BytesIO(self._files[path]) + elif mode == 'w': + if parent not in self._directories: + raise FileNotFoundError(errno.ENOENT, "File not found", parent) + stream = DummyPathIO(self._files, path) + self._files[path] = b'' + self._directories[parent].add(name) + else: + raise NotImplementedError + if text: + stream = io.TextIOWrapper(stream, encoding=encoding, errors=errors, newline=newline) + return stream + + def iterdir(self): + path = str(self.resolve()) + if path in self._files: + raise NotADirectoryError(errno.ENOTDIR, "Not a directory", path) + elif path in self._directories: + return (self / name for name in self._directories[path]) + else: + raise FileNotFoundError(errno.ENOENT, "File not found", path) + + def mkdir(self, mode=0o777, parents=False, exist_ok=False): + path = str(self.resolve()) + if path in self._directories: + if exist_ok: + return + else: + raise FileExistsError(errno.EEXIST, "File exists", path) + try: + if self.name: + self._directories[str(self.parent)].add(self.name) + self._directories[path] = set() + except KeyError: + if not parents: + raise FileNotFoundError(errno.ENOENT, "File not found", str(self.parent)) from None + self.parent.mkdir(parents=True, exist_ok=True) + self.mkdir(mode, parents=False, exist_ok=exist_ok) + + +class DummyPathTest(DummyPurePathTest): + """Tests for PathBase methods that use stat(), open() and iterdir().""" + + cls = DummyPath + can_symlink = False + + # (self.base) + # | + # |-- brokenLink -> non-existing + # |-- dirA + # | `-- linkC -> ../dirB + # |-- dirB + # | |-- fileB + # | `-- linkD -> ../dirB + # |-- dirC + # | |-- dirD + # | | `-- fileD + # | `-- fileC + # | `-- novel.txt + # |-- dirE # No permissions + # |-- fileA + # |-- linkA -> fileA + # |-- linkB -> dirB + # `-- brokenLinkLoop -> brokenLinkLoop + # + + def setUp(self): + super().setUp() + pathmod = self.cls.pathmod + p = self.cls(self.base) + p.mkdir(parents=True) + p.joinpath('dirA').mkdir() + p.joinpath('dirB').mkdir() + p.joinpath('dirC').mkdir() + p.joinpath('dirC', 'dirD').mkdir() + p.joinpath('dirE').mkdir() + with p.joinpath('fileA').open('wb') as f: + f.write(b"this is file A\n") + with p.joinpath('dirB', 'fileB').open('wb') as f: + f.write(b"this is file B\n") + with p.joinpath('dirC', 'fileC').open('wb') as f: + f.write(b"this is file C\n") 
+ with p.joinpath('dirC', 'novel.txt').open('wb') as f: + f.write(b"this is a novel\n") + with p.joinpath('dirC', 'dirD', 'fileD').open('wb') as f: + f.write(b"this is file D\n") + if self.can_symlink: + p.joinpath('linkA').symlink_to('fileA') + p.joinpath('brokenLink').symlink_to('non-existing') + p.joinpath('linkB').symlink_to('dirB') + p.joinpath('dirA', 'linkC').symlink_to(pathmod.join('..', 'dirB')) + p.joinpath('dirB', 'linkD').symlink_to(pathmod.join('..', 'dirB')) + p.joinpath('brokenLinkLoop').symlink_to('brokenLinkLoop') + + def tearDown(self): + cls = self.cls + cls._files.clear() + cls._directories.clear() + cls._symlinks.clear() + + def tempdir(self): + path = self.cls(self.base).with_name('tmp-dirD') + path.mkdir() + return path + + def assertFileNotFound(self, func, *args, **kwargs): + with self.assertRaises(FileNotFoundError) as cm: + func(*args, **kwargs) + self.assertEqual(cm.exception.errno, errno.ENOENT) + + def assertEqualNormCase(self, path_a, path_b): + self.assertEqual(os.path.normcase(path_a), os.path.normcase(path_b)) + + def test_samefile(self): + fileA_path = os.path.join(self.base, 'fileA') + fileB_path = os.path.join(self.base, 'dirB', 'fileB') + p = self.cls(fileA_path) + pp = self.cls(fileA_path) + q = self.cls(fileB_path) + self.assertTrue(p.samefile(fileA_path)) + self.assertTrue(p.samefile(pp)) + self.assertFalse(p.samefile(fileB_path)) + self.assertFalse(p.samefile(q)) + # Test the non-existent file case + non_existent = os.path.join(self.base, 'foo') + r = self.cls(non_existent) + self.assertRaises(FileNotFoundError, p.samefile, r) + self.assertRaises(FileNotFoundError, p.samefile, non_existent) + self.assertRaises(FileNotFoundError, r.samefile, p) + self.assertRaises(FileNotFoundError, r.samefile, non_existent) + self.assertRaises(FileNotFoundError, r.samefile, r) + self.assertRaises(FileNotFoundError, r.samefile, non_existent) + + def test_empty_path(self): + # The empty path points to '.' + p = self.cls('') + self.assertEqual(str(p), '.') + + def test_exists(self): + P = self.cls + p = P(self.base) + self.assertIs(True, p.exists()) + self.assertIs(True, (p / 'dirA').exists()) + self.assertIs(True, (p / 'fileA').exists()) + self.assertIs(False, (p / 'fileA' / 'bah').exists()) + if self.can_symlink: + self.assertIs(True, (p / 'linkA').exists()) + self.assertIs(True, (p / 'linkB').exists()) + self.assertIs(True, (p / 'linkB' / 'fileB').exists()) + self.assertIs(False, (p / 'linkA' / 'bah').exists()) + self.assertIs(False, (p / 'brokenLink').exists()) + self.assertIs(True, (p / 'brokenLink').exists(follow_symlinks=False)) + self.assertIs(False, (p / 'foo').exists()) + self.assertIs(False, P('/xyzzy').exists()) + self.assertIs(False, P(self.base + '\udfff').exists()) + self.assertIs(False, P(self.base + '\x00').exists()) + + def test_open_common(self): + p = self.cls(self.base) + with (p / 'fileA').open('r') as f: + self.assertIsInstance(f, io.TextIOBase) + self.assertEqual(f.read(), "this is file A\n") + with (p / 'fileA').open('rb') as f: + self.assertIsInstance(f, io.BufferedIOBase) + self.assertEqual(f.read().strip(), b"this is file A") + + def test_read_write_bytes(self): + p = self.cls(self.base) + (p / 'fileA').write_bytes(b'abcdefg') + self.assertEqual((p / 'fileA').read_bytes(), b'abcdefg') + # Check that trying to write str does not truncate the file. 
+ self.assertRaises(TypeError, (p / 'fileA').write_bytes, 'somestr') + self.assertEqual((p / 'fileA').read_bytes(), b'abcdefg') + + def test_read_write_text(self): + p = self.cls(self.base) + (p / 'fileA').write_text('äbcdefg', encoding='latin-1') + self.assertEqual((p / 'fileA').read_text( + encoding='utf-8', errors='ignore'), 'bcdefg') + # Check that trying to write bytes does not truncate the file. + self.assertRaises(TypeError, (p / 'fileA').write_text, b'somebytes') + self.assertEqual((p / 'fileA').read_text(encoding='latin-1'), 'äbcdefg') + + def test_read_text_with_newlines(self): + p = self.cls(self.base) + # Check that `\n` character change nothing + (p / 'fileA').write_bytes(b'abcde\r\nfghlk\n\rmnopq') + self.assertEqual((p / 'fileA').read_text(newline='\n'), + 'abcde\r\nfghlk\n\rmnopq') + # Check that `\r` character replaces `\n` + (p / 'fileA').write_bytes(b'abcde\r\nfghlk\n\rmnopq') + self.assertEqual((p / 'fileA').read_text(newline='\r'), + 'abcde\r\nfghlk\n\rmnopq') + # Check that `\r\n` character replaces `\n` + (p / 'fileA').write_bytes(b'abcde\r\nfghlk\n\rmnopq') + self.assertEqual((p / 'fileA').read_text(newline='\r\n'), + 'abcde\r\nfghlk\n\rmnopq') + + def test_write_text_with_newlines(self): + p = self.cls(self.base) + # Check that `\n` character change nothing + (p / 'fileA').write_text('abcde\r\nfghlk\n\rmnopq', newline='\n') + self.assertEqual((p / 'fileA').read_bytes(), + b'abcde\r\nfghlk\n\rmnopq') + # Check that `\r` character replaces `\n` + (p / 'fileA').write_text('abcde\r\nfghlk\n\rmnopq', newline='\r') + self.assertEqual((p / 'fileA').read_bytes(), + b'abcde\r\rfghlk\r\rmnopq') + # Check that `\r\n` character replaces `\n` + (p / 'fileA').write_text('abcde\r\nfghlk\n\rmnopq', newline='\r\n') + self.assertEqual((p / 'fileA').read_bytes(), + b'abcde\r\r\nfghlk\r\n\rmnopq') + # Check that no argument passed will change `\n` to `os.linesep` + os_linesep_byte = bytes(os.linesep, encoding='ascii') + (p / 'fileA').write_text('abcde\nfghlk\n\rmnopq') + self.assertEqual((p / 'fileA').read_bytes(), + b'abcde' + os_linesep_byte + b'fghlk' + os_linesep_byte + b'\rmnopq') + + def test_iterdir(self): + P = self.cls + p = P(self.base) + it = p.iterdir() + paths = set(it) + expected = ['dirA', 'dirB', 'dirC', 'dirE', 'fileA'] + if self.can_symlink: + expected += ['linkA', 'linkB', 'brokenLink', 'brokenLinkLoop'] + self.assertEqual(paths, { P(self.base, q) for q in expected }) + + def test_iterdir_symlink(self): + if not self.can_symlink: + self.skipTest("symlinks required") + # __iter__ on a symlink to a directory. + P = self.cls + p = P(self.base, 'linkB') + paths = set(p.iterdir()) + expected = { P(self.base, 'linkB', q) for q in ['fileB', 'linkD'] } + self.assertEqual(paths, expected) + + def test_iterdir_nodir(self): + # __iter__ on something that is not a directory. + p = self.cls(self.base, 'fileA') + with self.assertRaises(OSError) as cm: + p.iterdir() + # ENOENT or EINVAL under Windows, ENOTDIR otherwise + # (see issue #12802). 
+ self.assertIn(cm.exception.errno, (errno.ENOTDIR, + errno.ENOENT, errno.EINVAL)) + + def test_glob_common(self): + def _check(glob, expected): + self.assertEqual(set(glob), { P(self.base, q) for q in expected }) + P = self.cls + p = P(self.base) + it = p.glob("fileA") + self.assertIsInstance(it, collections.abc.Iterator) + _check(it, ["fileA"]) + _check(p.glob("fileB"), []) + _check(p.glob("dir*/file*"), ["dirB/fileB", "dirC/fileC"]) + if not self.can_symlink: + _check(p.glob("*A"), ['dirA', 'fileA']) + else: + _check(p.glob("*A"), ['dirA', 'fileA', 'linkA']) + if not self.can_symlink: + _check(p.glob("*B/*"), ['dirB/fileB']) + else: + _check(p.glob("*B/*"), ['dirB/fileB', 'dirB/linkD', + 'linkB/fileB', 'linkB/linkD']) + if not self.can_symlink: + _check(p.glob("*/fileB"), ['dirB/fileB']) + else: + _check(p.glob("*/fileB"), ['dirB/fileB', 'linkB/fileB']) + if self.can_symlink: + _check(p.glob("brokenLink"), ['brokenLink']) + + if not self.can_symlink: + _check(p.glob("*/"), ["dirA/", "dirB/", "dirC/", "dirE/"]) + else: + _check(p.glob("*/"), ["dirA/", "dirB/", "dirC/", "dirE/", "linkB/"]) + + def test_glob_empty_pattern(self): + p = self.cls() + with self.assertRaisesRegex(ValueError, 'Unacceptable pattern'): + list(p.glob('')) + + def test_glob_case_sensitive(self): + P = self.cls + def _check(path, pattern, case_sensitive, expected): + actual = {str(q) for q in path.glob(pattern, case_sensitive=case_sensitive)} + expected = {str(P(self.base, q)) for q in expected} + self.assertEqual(actual, expected) + path = P(self.base) + _check(path, "DIRB/FILE*", True, []) + _check(path, "DIRB/FILE*", False, ["dirB/fileB"]) + _check(path, "dirb/file*", True, []) + _check(path, "dirb/file*", False, ["dirB/fileB"]) + + def test_glob_follow_symlinks_common(self): + if not self.can_symlink: + self.skipTest("symlinks required") + def _check(path, glob, expected): + actual = {path for path in path.glob(glob, follow_symlinks=True) + if "linkD" not in path.parent.parts} # exclude symlink loop. 
+ self.assertEqual(actual, { P(self.base, q) for q in expected }) + P = self.cls + p = P(self.base) + _check(p, "fileB", []) + _check(p, "dir*/file*", ["dirB/fileB", "dirC/fileC"]) + _check(p, "*A", ["dirA", "fileA", "linkA"]) + _check(p, "*B/*", ["dirB/fileB", "dirB/linkD", "linkB/fileB", "linkB/linkD"]) + _check(p, "*/fileB", ["dirB/fileB", "linkB/fileB"]) + _check(p, "*/", ["dirA/", "dirB/", "dirC/", "dirE/", "linkB/"]) + _check(p, "dir*/*/..", ["dirC/dirD/..", "dirA/linkC/.."]) + _check(p, "dir*/**/", ["dirA/", "dirA/linkC/", "dirA/linkC/linkD/", "dirB/", "dirB/linkD/", + "dirC/", "dirC/dirD/", "dirE/"]) + _check(p, "dir*/**/..", ["dirA/..", "dirA/linkC/..", "dirB/..", + "dirC/..", "dirC/dirD/..", "dirE/.."]) + _check(p, "dir*/*/**/", ["dirA/linkC/", "dirA/linkC/linkD/", "dirB/linkD/", "dirC/dirD/"]) + _check(p, "dir*/*/**/..", ["dirA/linkC/..", "dirC/dirD/.."]) + _check(p, "dir*/**/fileC", ["dirC/fileC"]) + _check(p, "dir*/*/../dirD/**/", ["dirC/dirD/../dirD/"]) + _check(p, "*/dirD/**/", ["dirC/dirD/"]) + + def test_glob_no_follow_symlinks_common(self): + if not self.can_symlink: + self.skipTest("symlinks required") + def _check(path, glob, expected): + actual = {path for path in path.glob(glob, follow_symlinks=False)} + self.assertEqual(actual, { P(self.base, q) for q in expected }) + P = self.cls + p = P(self.base) + _check(p, "fileB", []) + _check(p, "dir*/file*", ["dirB/fileB", "dirC/fileC"]) + _check(p, "*A", ["dirA", "fileA", "linkA"]) + _check(p, "*B/*", ["dirB/fileB", "dirB/linkD"]) + _check(p, "*/fileB", ["dirB/fileB"]) + _check(p, "*/", ["dirA/", "dirB/", "dirC/", "dirE/"]) + _check(p, "dir*/*/..", ["dirC/dirD/.."]) + _check(p, "dir*/**/", ["dirA/", "dirB/", "dirC/", "dirC/dirD/", "dirE/"]) + _check(p, "dir*/**/..", ["dirA/..", "dirB/..", "dirC/..", "dirC/dirD/..", "dirE/.."]) + _check(p, "dir*/*/**/", ["dirC/dirD/"]) + _check(p, "dir*/*/**/..", ["dirC/dirD/.."]) + _check(p, "dir*/**/fileC", ["dirC/fileC"]) + _check(p, "dir*/*/../dirD/**/", ["dirC/dirD/../dirD/"]) + _check(p, "*/dirD/**/", ["dirC/dirD/"]) + + def test_rglob_common(self): + def _check(glob, expected): + self.assertEqual(set(glob), {P(self.base, q) for q in expected}) + P = self.cls + p = P(self.base) + it = p.rglob("fileA") + self.assertIsInstance(it, collections.abc.Iterator) + _check(it, ["fileA"]) + _check(p.rglob("fileB"), ["dirB/fileB"]) + _check(p.rglob("**/fileB"), ["dirB/fileB"]) + _check(p.rglob("*/fileA"), []) + if not self.can_symlink: + _check(p.rglob("*/fileB"), ["dirB/fileB"]) + else: + _check(p.rglob("*/fileB"), ["dirB/fileB", "dirB/linkD/fileB", + "linkB/fileB", "dirA/linkC/fileB"]) + _check(p.rglob("file*"), ["fileA", "dirB/fileB", + "dirC/fileC", "dirC/dirD/fileD"]) + if not self.can_symlink: + _check(p.rglob("*/"), [ + "dirA/", "dirB/", "dirC/", "dirC/dirD/", "dirE/", + ]) + else: + _check(p.rglob("*/"), [ + "dirA/", "dirA/linkC/", "dirB/", "dirB/linkD/", "dirC/", + "dirC/dirD/", "dirE/", "linkB/", + ]) + _check(p.rglob(""), ["./", "dirA/", "dirB/", "dirC/", "dirE/", "dirC/dirD/"]) + + p = P(self.base, "dirC") + _check(p.rglob("*"), ["dirC/fileC", "dirC/novel.txt", + "dirC/dirD", "dirC/dirD/fileD"]) + _check(p.rglob("file*"), ["dirC/fileC", "dirC/dirD/fileD"]) + _check(p.rglob("**/file*"), ["dirC/fileC", "dirC/dirD/fileD"]) + _check(p.rglob("dir*/**/"), ["dirC/dirD/"]) + _check(p.rglob("*/*"), ["dirC/dirD/fileD"]) + _check(p.rglob("*/"), ["dirC/dirD/"]) + _check(p.rglob(""), ["dirC/", "dirC/dirD/"]) + _check(p.rglob("**/"), ["dirC/", "dirC/dirD/"]) + # gh-91616, a re module regression + 
_check(p.rglob("*.txt"), ["dirC/novel.txt"]) + _check(p.rglob("*.*"), ["dirC/novel.txt"]) + + def test_rglob_follow_symlinks_common(self): + if not self.can_symlink: + self.skipTest("symlinks required") + def _check(path, glob, expected): + actual = {path for path in path.rglob(glob, follow_symlinks=True) + if 'linkD' not in path.parent.parts} # exclude symlink loop. + self.assertEqual(actual, { P(self.base, q) for q in expected }) + P = self.cls + p = P(self.base) + _check(p, "fileB", ["dirB/fileB", "dirA/linkC/fileB", "linkB/fileB"]) + _check(p, "*/fileA", []) + _check(p, "*/fileB", ["dirB/fileB", "dirA/linkC/fileB", "linkB/fileB"]) + _check(p, "file*", ["fileA", "dirA/linkC/fileB", "dirB/fileB", + "dirC/fileC", "dirC/dirD/fileD", "linkB/fileB"]) + _check(p, "*/", ["dirA/", "dirA/linkC/", "dirA/linkC/linkD/", "dirB/", "dirB/linkD/", + "dirC/", "dirC/dirD/", "dirE/", "linkB/", "linkB/linkD/"]) + _check(p, "", ["./", "dirA/", "dirA/linkC/", "dirA/linkC/linkD/", "dirB/", "dirB/linkD/", + "dirC/", "dirE/", "dirC/dirD/", "linkB/", "linkB/linkD/"]) + + p = P(self.base, "dirC") + _check(p, "*", ["dirC/fileC", "dirC/novel.txt", + "dirC/dirD", "dirC/dirD/fileD"]) + _check(p, "file*", ["dirC/fileC", "dirC/dirD/fileD"]) + _check(p, "*/*", ["dirC/dirD/fileD"]) + _check(p, "*/", ["dirC/dirD/"]) + _check(p, "", ["dirC/", "dirC/dirD/"]) + # gh-91616, a re module regression + _check(p, "*.txt", ["dirC/novel.txt"]) + _check(p, "*.*", ["dirC/novel.txt"]) + + def test_rglob_no_follow_symlinks_common(self): + if not self.can_symlink: + self.skipTest("symlinks required") + def _check(path, glob, expected): + actual = {path for path in path.rglob(glob, follow_symlinks=False)} + self.assertEqual(actual, { P(self.base, q) for q in expected }) + P = self.cls + p = P(self.base) + _check(p, "fileB", ["dirB/fileB"]) + _check(p, "*/fileA", []) + _check(p, "*/fileB", ["dirB/fileB"]) + _check(p, "file*", ["fileA", "dirB/fileB", "dirC/fileC", "dirC/dirD/fileD", ]) + _check(p, "*/", ["dirA/", "dirB/", "dirC/", "dirC/dirD/", "dirE/"]) + _check(p, "", ["./", "dirA/", "dirB/", "dirC/", "dirE/", "dirC/dirD/"]) + + p = P(self.base, "dirC") + _check(p, "*", ["dirC/fileC", "dirC/novel.txt", + "dirC/dirD", "dirC/dirD/fileD"]) + _check(p, "file*", ["dirC/fileC", "dirC/dirD/fileD"]) + _check(p, "*/*", ["dirC/dirD/fileD"]) + _check(p, "*/", ["dirC/dirD/"]) + _check(p, "", ["dirC/", "dirC/dirD/"]) + # gh-91616, a re module regression + _check(p, "*.txt", ["dirC/novel.txt"]) + _check(p, "*.*", ["dirC/novel.txt"]) + + def test_rglob_symlink_loop(self): + # Don't get fooled by symlink loops (Issue #26012). + if not self.can_symlink: + self.skipTest("symlinks required") + P = self.cls + p = P(self.base) + given = set(p.rglob('*')) + expect = {'brokenLink', + 'dirA', 'dirA/linkC', + 'dirB', 'dirB/fileB', 'dirB/linkD', + 'dirC', 'dirC/dirD', 'dirC/dirD/fileD', + 'dirC/fileC', 'dirC/novel.txt', + 'dirE', + 'fileA', + 'linkA', + 'linkB', + 'brokenLinkLoop', + } + self.assertEqual(given, {p / x for x in expect}) + + def test_glob_many_open_files(self): + depth = 30 + P = self.cls + p = base = P(self.base) / 'deep' + p.mkdir() + for _ in range(depth): + p /= 'd' + p.mkdir() + pattern = '/'.join(['*'] * depth) + iters = [base.glob(pattern) for j in range(100)] + for it in iters: + self.assertEqual(next(it), p) + iters = [base.rglob('d') for j in range(100)] + p = base + for i in range(depth): + p = p / 'd' + for it in iters: + self.assertEqual(next(it), p) + + def test_glob_dotdot(self): + # ".." is not special in globs. 
+        P = self.cls
+        p = P(self.base)
+        self.assertEqual(set(p.glob("..")), { P(self.base, "..") })
+        self.assertEqual(set(p.glob("../..")), { P(self.base, "..", "..") })
+        self.assertEqual(set(p.glob("dirA/..")), { P(self.base, "dirA", "..") })
+        self.assertEqual(set(p.glob("dirA/../file*")), { P(self.base, "dirA/../fileA") })
+        self.assertEqual(set(p.glob("dirA/../file*/..")), set())
+        self.assertEqual(set(p.glob("../xyzzy")), set())
+        self.assertEqual(set(p.glob("xyzzy/..")), set())
+        self.assertEqual(set(p.glob("/".join([".."] * 50))), { P(self.base, *[".."] * 50)})
+
+    def test_glob_permissions(self):
+        # See bpo-38894
+        if not self.can_symlink:
+            self.skipTest("symlinks required")
+        P = self.cls
+        base = P(self.base) / 'permissions'
+        base.mkdir()
+
+        for i in range(100):
+            link = base / f"link{i}"
+            if i % 2:
+                link.symlink_to(P(self.base, "dirE", "nonexistent"))
+            else:
+                link.symlink_to(P(self.base, "dirC"))
+
+        self.assertEqual(len(set(base.glob("*"))), 100)
+        self.assertEqual(len(set(base.glob("*/"))), 50)
+        self.assertEqual(len(set(base.glob("*/fileC"))), 50)
+        self.assertEqual(len(set(base.glob("*/file*"))), 50)
+
+    def test_glob_long_symlink(self):
+        # See gh-87695
+        if not self.can_symlink:
+            self.skipTest("symlinks required")
+        base = self.cls(self.base) / 'long_symlink'
+        base.mkdir()
+        bad_link = base / 'bad_link'
+        bad_link.symlink_to("bad" * 200)
+        self.assertEqual(sorted(base.glob('**/*')), [bad_link])
+
+    def test_glob_above_recursion_limit(self):
+        recursion_limit = 50
+        # directory_depth > recursion_limit
+        directory_depth = recursion_limit + 10
+        base = self.cls(self.base, 'deep')
+        path = base.joinpath(*(['d'] * directory_depth))
+        path.mkdir(parents=True)
+
+        with set_recursion_limit(recursion_limit):
+            list(base.glob('**/'))
+
+    def test_glob_recursive_no_trailing_slash(self):
+        P = self.cls
+        p = P(self.base)
+        with self.assertWarns(FutureWarning):
+            p.glob('**')
+        with self.assertWarns(FutureWarning):
+            p.glob('*/**')
+        with self.assertWarns(FutureWarning):
+            p.rglob('**')
+        with self.assertWarns(FutureWarning):
+            p.rglob('*/**')
+
+
+    def test_readlink(self):
+        if not self.can_symlink:
+            self.skipTest("symlinks required")
+        P = self.cls(self.base)
+        self.assertEqual((P / 'linkA').readlink(), self.cls('fileA'))
+        self.assertEqual((P / 'brokenLink').readlink(),
+                         self.cls('non-existing'))
+        self.assertEqual((P / 'linkB').readlink(), self.cls('dirB'))
+        self.assertEqual((P / 'linkB' / 'linkD').readlink(), self.cls('../dirB'))
+        with self.assertRaises(OSError):
+            (P / 'fileA').readlink()
+
+    @unittest.skipIf(hasattr(os, "readlink"), "os.readlink() is present")
+    def test_readlink_unsupported(self):
+        P = self.cls(self.base)
+        p = P / 'fileA'
+        with self.assertRaises(pathlib.UnsupportedOperation):
+            p.readlink()
+
+    def _check_resolve(self, p, expected, strict=True):
+        q = p.resolve(strict)
+        self.assertEqual(q, expected)
+
+    # This can be used to check both relative and absolute resolutions.
+ _check_resolve_relative = _check_resolve_absolute = _check_resolve + + def test_resolve_common(self): + if not self.can_symlink: + self.skipTest("symlinks required") + P = self.cls + p = P(self.base, 'foo') + with self.assertRaises(OSError) as cm: + p.resolve(strict=True) + self.assertEqual(cm.exception.errno, errno.ENOENT) + # Non-strict + self.assertEqualNormCase(str(p.resolve(strict=False)), + os.path.join(self.base, 'foo')) + p = P(self.base, 'foo', 'in', 'spam') + self.assertEqualNormCase(str(p.resolve(strict=False)), + os.path.join(self.base, 'foo', 'in', 'spam')) + p = P(self.base, '..', 'foo', 'in', 'spam') + self.assertEqualNormCase(str(p.resolve(strict=False)), + os.path.abspath(os.path.join('foo', 'in', 'spam'))) + # These are all relative symlinks. + p = P(self.base, 'dirB', 'fileB') + self._check_resolve_relative(p, p) + p = P(self.base, 'linkA') + self._check_resolve_relative(p, P(self.base, 'fileA')) + p = P(self.base, 'dirA', 'linkC', 'fileB') + self._check_resolve_relative(p, P(self.base, 'dirB', 'fileB')) + p = P(self.base, 'dirB', 'linkD', 'fileB') + self._check_resolve_relative(p, P(self.base, 'dirB', 'fileB')) + # Non-strict + p = P(self.base, 'dirA', 'linkC', 'fileB', 'foo', 'in', 'spam') + self._check_resolve_relative(p, P(self.base, 'dirB', 'fileB', 'foo', 'in', + 'spam'), False) + p = P(self.base, 'dirA', 'linkC', '..', 'foo', 'in', 'spam') + if os.name == 'nt' and isinstance(p, pathlib.Path): + # In Windows, if linkY points to dirB, 'dirA\linkY\..' + # resolves to 'dirA' without resolving linkY first. + self._check_resolve_relative(p, P(self.base, 'dirA', 'foo', 'in', + 'spam'), False) + else: + # In Posix, if linkY points to dirB, 'dirA/linkY/..' + # resolves to 'dirB/..' first before resolving to parent of dirB. + self._check_resolve_relative(p, P(self.base, 'foo', 'in', 'spam'), False) + # Now create absolute symlinks. + d = self.tempdir() + P(self.base, 'dirA', 'linkX').symlink_to(d) + P(self.base, str(d), 'linkY').symlink_to(self.pathmod.join(self.base, 'dirB')) + p = P(self.base, 'dirA', 'linkX', 'linkY', 'fileB') + self._check_resolve_absolute(p, P(self.base, 'dirB', 'fileB')) + # Non-strict + p = P(self.base, 'dirA', 'linkX', 'linkY', 'foo', 'in', 'spam') + self._check_resolve_relative(p, P(self.base, 'dirB', 'foo', 'in', 'spam'), + False) + p = P(self.base, 'dirA', 'linkX', 'linkY', '..', 'foo', 'in', 'spam') + if os.name == 'nt' and isinstance(p, pathlib.Path): + # In Windows, if linkY points to dirB, 'dirA\linkY\..' + # resolves to 'dirA' without resolving linkY first. + self._check_resolve_relative(p, P(d, 'foo', 'in', 'spam'), False) + else: + # In Posix, if linkY points to dirB, 'dirA/linkY/..' + # resolves to 'dirB/..' first before resolving to parent of dirB. 
+ self._check_resolve_relative(p, P(self.base, 'foo', 'in', 'spam'), False) + + def test_resolve_dot(self): + # See http://web.archive.org/web/20200623062557/https://bitbucket.org/pitrou/pathlib/issues/9/ + if not self.can_symlink: + self.skipTest("symlinks required") + p = self.cls(self.base) + p.joinpath('0').symlink_to('.', target_is_directory=True) + p.joinpath('1').symlink_to(os.path.join('0', '0'), target_is_directory=True) + p.joinpath('2').symlink_to(os.path.join('1', '1'), target_is_directory=True) + q = p / '2' + self.assertEqual(q.resolve(strict=True), p) + r = q / '3' / '4' + self.assertRaises(FileNotFoundError, r.resolve, strict=True) + # Non-strict + self.assertEqual(r.resolve(strict=False), p / '3' / '4') + + def _check_symlink_loop(self, *args): + path = self.cls(*args) + with self.assertRaises(OSError) as cm: + path.resolve(strict=True) + self.assertEqual(cm.exception.errno, errno.ELOOP) + + def test_resolve_loop(self): + if not self.can_symlink: + self.skipTest("symlinks required") + if os.name == 'nt' and issubclass(self.cls, pathlib.Path): + self.skipTest("symlink loops work differently with concrete Windows paths") + # Loops with relative symlinks. + self.cls(self.base, 'linkX').symlink_to('linkX/inside') + self._check_symlink_loop(self.base, 'linkX') + self.cls(self.base, 'linkY').symlink_to('linkY') + self._check_symlink_loop(self.base, 'linkY') + self.cls(self.base, 'linkZ').symlink_to('linkZ/../linkZ') + self._check_symlink_loop(self.base, 'linkZ') + # Non-strict + p = self.cls(self.base, 'linkZ', 'foo') + self.assertEqual(p.resolve(strict=False), p) + # Loops with absolute symlinks. + self.cls(self.base, 'linkU').symlink_to(self.pathmod.join(self.base, 'linkU/inside')) + self._check_symlink_loop(self.base, 'linkU') + self.cls(self.base, 'linkV').symlink_to(self.pathmod.join(self.base, 'linkV')) + self._check_symlink_loop(self.base, 'linkV') + self.cls(self.base, 'linkW').symlink_to(self.pathmod.join(self.base, 'linkW/../linkW')) + self._check_symlink_loop(self.base, 'linkW') + # Non-strict + q = self.cls(self.base, 'linkW', 'foo') + self.assertEqual(q.resolve(strict=False), q) + + def test_stat(self): + statA = self.cls(self.base).joinpath('fileA').stat() + statB = self.cls(self.base).joinpath('dirB', 'fileB').stat() + statC = self.cls(self.base).joinpath('dirC').stat() + # st_mode: files are the same, directory differs. + self.assertIsInstance(statA.st_mode, int) + self.assertEqual(statA.st_mode, statB.st_mode) + self.assertNotEqual(statA.st_mode, statC.st_mode) + self.assertNotEqual(statB.st_mode, statC.st_mode) + # st_ino: all different, + self.assertIsInstance(statA.st_ino, int) + self.assertNotEqual(statA.st_ino, statB.st_ino) + self.assertNotEqual(statA.st_ino, statC.st_ino) + self.assertNotEqual(statB.st_ino, statC.st_ino) + # st_dev: all the same. + self.assertIsInstance(statA.st_dev, int) + self.assertEqual(statA.st_dev, statB.st_dev) + self.assertEqual(statA.st_dev, statC.st_dev) + # other attributes not used by pathlib. 
+ + def test_stat_no_follow_symlinks(self): + if not self.can_symlink: + self.skipTest("symlinks required") + p = self.cls(self.base) / 'linkA' + st = p.stat() + self.assertNotEqual(st, p.stat(follow_symlinks=False)) + + def test_stat_no_follow_symlinks_nosymlink(self): + p = self.cls(self.base) / 'fileA' + st = p.stat() + self.assertEqual(st, p.stat(follow_symlinks=False)) + + def test_lstat(self): + if not self.can_symlink: + self.skipTest("symlinks required") + p = self.cls(self.base)/ 'linkA' + st = p.stat() + self.assertNotEqual(st, p.lstat()) + + def test_lstat_nosymlink(self): + p = self.cls(self.base) / 'fileA' + st = p.stat() + self.assertEqual(st, p.lstat()) + + def test_is_dir(self): + P = self.cls(self.base) + self.assertTrue((P / 'dirA').is_dir()) + self.assertFalse((P / 'fileA').is_dir()) + self.assertFalse((P / 'non-existing').is_dir()) + self.assertFalse((P / 'fileA' / 'bah').is_dir()) + if self.can_symlink: + self.assertFalse((P / 'linkA').is_dir()) + self.assertTrue((P / 'linkB').is_dir()) + self.assertFalse((P/ 'brokenLink').is_dir()) + self.assertFalse((P / 'dirA\udfff').is_dir()) + self.assertFalse((P / 'dirA\x00').is_dir()) + + def test_is_dir_no_follow_symlinks(self): + P = self.cls(self.base) + self.assertTrue((P / 'dirA').is_dir(follow_symlinks=False)) + self.assertFalse((P / 'fileA').is_dir(follow_symlinks=False)) + self.assertFalse((P / 'non-existing').is_dir(follow_symlinks=False)) + self.assertFalse((P / 'fileA' / 'bah').is_dir(follow_symlinks=False)) + if self.can_symlink: + self.assertFalse((P / 'linkA').is_dir(follow_symlinks=False)) + self.assertFalse((P / 'linkB').is_dir(follow_symlinks=False)) + self.assertFalse((P/ 'brokenLink').is_dir(follow_symlinks=False)) + self.assertFalse((P / 'dirA\udfff').is_dir(follow_symlinks=False)) + self.assertFalse((P / 'dirA\x00').is_dir(follow_symlinks=False)) + + def test_is_file(self): + P = self.cls(self.base) + self.assertTrue((P / 'fileA').is_file()) + self.assertFalse((P / 'dirA').is_file()) + self.assertFalse((P / 'non-existing').is_file()) + self.assertFalse((P / 'fileA' / 'bah').is_file()) + if self.can_symlink: + self.assertTrue((P / 'linkA').is_file()) + self.assertFalse((P / 'linkB').is_file()) + self.assertFalse((P/ 'brokenLink').is_file()) + self.assertFalse((P / 'fileA\udfff').is_file()) + self.assertFalse((P / 'fileA\x00').is_file()) + + def test_is_file_no_follow_symlinks(self): + P = self.cls(self.base) + self.assertTrue((P / 'fileA').is_file(follow_symlinks=False)) + self.assertFalse((P / 'dirA').is_file(follow_symlinks=False)) + self.assertFalse((P / 'non-existing').is_file(follow_symlinks=False)) + self.assertFalse((P / 'fileA' / 'bah').is_file(follow_symlinks=False)) + if self.can_symlink: + self.assertFalse((P / 'linkA').is_file(follow_symlinks=False)) + self.assertFalse((P / 'linkB').is_file(follow_symlinks=False)) + self.assertFalse((P/ 'brokenLink').is_file(follow_symlinks=False)) + self.assertFalse((P / 'fileA\udfff').is_file(follow_symlinks=False)) + self.assertFalse((P / 'fileA\x00').is_file(follow_symlinks=False)) + + def test_is_mount(self): + P = self.cls(self.base) + self.assertFalse((P / 'fileA').is_mount()) + self.assertFalse((P / 'dirA').is_mount()) + self.assertFalse((P / 'non-existing').is_mount()) + self.assertFalse((P / 'fileA' / 'bah').is_mount()) + if self.can_symlink: + self.assertFalse((P / 'linkA').is_mount()) + + def test_is_symlink(self): + P = self.cls(self.base) + self.assertFalse((P / 'fileA').is_symlink()) + self.assertFalse((P / 'dirA').is_symlink()) + 
self.assertFalse((P / 'non-existing').is_symlink()) + self.assertFalse((P / 'fileA' / 'bah').is_symlink()) + if self.can_symlink: + self.assertTrue((P / 'linkA').is_symlink()) + self.assertTrue((P / 'linkB').is_symlink()) + self.assertTrue((P/ 'brokenLink').is_symlink()) + self.assertIs((P / 'fileA\udfff').is_file(), False) + self.assertIs((P / 'fileA\x00').is_file(), False) + if self.can_symlink: + self.assertIs((P / 'linkA\udfff').is_file(), False) + self.assertIs((P / 'linkA\x00').is_file(), False) + + def test_is_junction_false(self): + P = self.cls(self.base) + self.assertFalse((P / 'fileA').is_junction()) + self.assertFalse((P / 'dirA').is_junction()) + self.assertFalse((P / 'non-existing').is_junction()) + self.assertFalse((P / 'fileA' / 'bah').is_junction()) + self.assertFalse((P / 'fileA\udfff').is_junction()) + self.assertFalse((P / 'fileA\x00').is_junction()) + + def test_is_fifo_false(self): + P = self.cls(self.base) + self.assertFalse((P / 'fileA').is_fifo()) + self.assertFalse((P / 'dirA').is_fifo()) + self.assertFalse((P / 'non-existing').is_fifo()) + self.assertFalse((P / 'fileA' / 'bah').is_fifo()) + self.assertIs((P / 'fileA\udfff').is_fifo(), False) + self.assertIs((P / 'fileA\x00').is_fifo(), False) + + def test_is_socket_false(self): + P = self.cls(self.base) + self.assertFalse((P / 'fileA').is_socket()) + self.assertFalse((P / 'dirA').is_socket()) + self.assertFalse((P / 'non-existing').is_socket()) + self.assertFalse((P / 'fileA' / 'bah').is_socket()) + self.assertIs((P / 'fileA\udfff').is_socket(), False) + self.assertIs((P / 'fileA\x00').is_socket(), False) + + def test_is_block_device_false(self): + P = self.cls(self.base) + self.assertFalse((P / 'fileA').is_block_device()) + self.assertFalse((P / 'dirA').is_block_device()) + self.assertFalse((P / 'non-existing').is_block_device()) + self.assertFalse((P / 'fileA' / 'bah').is_block_device()) + self.assertIs((P / 'fileA\udfff').is_block_device(), False) + self.assertIs((P / 'fileA\x00').is_block_device(), False) + + def test_is_char_device_false(self): + P = self.cls(self.base) + self.assertFalse((P / 'fileA').is_char_device()) + self.assertFalse((P / 'dirA').is_char_device()) + self.assertFalse((P / 'non-existing').is_char_device()) + self.assertFalse((P / 'fileA' / 'bah').is_char_device()) + self.assertIs((P / 'fileA\udfff').is_char_device(), False) + self.assertIs((P / 'fileA\x00').is_char_device(), False) + + def test_pickling_common(self): + p = self.cls(self.base, 'fileA') + for proto in range(0, pickle.HIGHEST_PROTOCOL + 1): + dumped = pickle.dumps(p, proto) + pp = pickle.loads(dumped) + self.assertEqual(pp.stat(), p.stat()) + + def test_parts_interning(self): + P = self.cls + p = P('/usr/bin/foo') + q = P('/usr/local/bin') + # 'usr' + self.assertIs(p.parts[1], q.parts[1]) + # 'bin' + self.assertIs(p.parts[2], q.parts[3]) + + def _check_complex_symlinks(self, link0_target): + if not self.can_symlink: + self.skipTest("symlinks required") + + # Test solving a non-looping chain of symlinks (issue #19887). + P = self.cls(self.base) + P.joinpath('link1').symlink_to(os.path.join('link0', 'link0'), target_is_directory=True) + P.joinpath('link2').symlink_to(os.path.join('link1', 'link1'), target_is_directory=True) + P.joinpath('link3').symlink_to(os.path.join('link2', 'link2'), target_is_directory=True) + P.joinpath('link0').symlink_to(link0_target, target_is_directory=True) + + # Resolve absolute paths. 
+ p = (P / 'link0').resolve() + self.assertEqual(p, P) + self.assertEqualNormCase(str(p), self.base) + p = (P / 'link1').resolve() + self.assertEqual(p, P) + self.assertEqualNormCase(str(p), self.base) + p = (P / 'link2').resolve() + self.assertEqual(p, P) + self.assertEqualNormCase(str(p), self.base) + p = (P / 'link3').resolve() + self.assertEqual(p, P) + self.assertEqualNormCase(str(p), self.base) + + # Resolve relative paths. + try: + self.cls().absolute() + except pathlib.UnsupportedOperation: + return + old_path = os.getcwd() + os.chdir(self.base) + try: + p = self.cls('link0').resolve() + self.assertEqual(p, P) + self.assertEqualNormCase(str(p), self.base) + p = self.cls('link1').resolve() + self.assertEqual(p, P) + self.assertEqualNormCase(str(p), self.base) + p = self.cls('link2').resolve() + self.assertEqual(p, P) + self.assertEqualNormCase(str(p), self.base) + p = self.cls('link3').resolve() + self.assertEqual(p, P) + self.assertEqualNormCase(str(p), self.base) + finally: + os.chdir(old_path) + + def test_complex_symlinks_absolute(self): + self._check_complex_symlinks(self.base) + + def test_complex_symlinks_relative(self): + self._check_complex_symlinks('.') + + def test_complex_symlinks_relative_dot_dot(self): + self._check_complex_symlinks(os.path.join('dirA', '..')) + + def setUpWalk(self): + # Build: + # TESTFN/ + # TEST1/ a file kid and two directory kids + # tmp1 + # SUB1/ a file kid and a directory kid + # tmp2 + # SUB11/ no kids + # SUB2/ a file kid and a dirsymlink kid + # tmp3 + # link/ a symlink to TEST2 + # broken_link + # broken_link2 + # TEST2/ + # tmp4 a lone file + self.walk_path = self.cls(self.base, "TEST1") + self.sub1_path = self.walk_path / "SUB1" + self.sub11_path = self.sub1_path / "SUB11" + self.sub2_path = self.walk_path / "SUB2" + tmp1_path = self.walk_path / "tmp1" + tmp2_path = self.sub1_path / "tmp2" + tmp3_path = self.sub2_path / "tmp3" + self.link_path = self.sub2_path / "link" + t2_path = self.cls(self.base, "TEST2") + tmp4_path = self.cls(self.base, "TEST2", "tmp4") + broken_link_path = self.sub2_path / "broken_link" + broken_link2_path = self.sub2_path / "broken_link2" + + self.sub11_path.mkdir(parents=True) + self.sub2_path.mkdir(parents=True) + t2_path.mkdir(parents=True) + + for path in tmp1_path, tmp2_path, tmp3_path, tmp4_path: + with path.open("w", encoding='utf-8') as f: + f.write(f"I'm {path} and proud of it. Blame test_pathlib.\n") + + if self.can_symlink: + self.link_path.symlink_to(t2_path) + broken_link_path.symlink_to('broken') + broken_link2_path.symlink_to(self.cls('tmp3', 'broken')) + self.sub2_tree = (self.sub2_path, [], ["broken_link", "broken_link2", "link", "tmp3"]) + else: + self.sub2_tree = (self.sub2_path, [], ["tmp3"]) + + def test_walk_topdown(self): + self.setUpWalk() + walker = self.walk_path.walk() + entry = next(walker) + entry[1].sort() # Ensure we visit SUB1 before SUB2 + self.assertEqual(entry, (self.walk_path, ["SUB1", "SUB2"], ["tmp1"])) + entry = next(walker) + self.assertEqual(entry, (self.sub1_path, ["SUB11"], ["tmp2"])) + entry = next(walker) + self.assertEqual(entry, (self.sub11_path, [], [])) + entry = next(walker) + entry[1].sort() + entry[2].sort() + self.assertEqual(entry, self.sub2_tree) + with self.assertRaises(StopIteration): + next(walker) + + def test_walk_prune(self): + self.setUpWalk() + # Prune the search. + all = [] + for root, dirs, files in self.walk_path.walk(): + all.append((root, dirs, files)) + if 'SUB1' in dirs: + # Note that this also mutates the dirs we appended to all! 
+ dirs.remove('SUB1') + + self.assertEqual(len(all), 2) + self.assertEqual(all[0], (self.walk_path, ["SUB2"], ["tmp1"])) + + all[1][-1].sort() + all[1][1].sort() + self.assertEqual(all[1], self.sub2_tree) + + def test_walk_bottom_up(self): + self.setUpWalk() + seen_testfn = seen_sub1 = seen_sub11 = seen_sub2 = False + for path, dirnames, filenames in self.walk_path.walk(top_down=False): + if path == self.walk_path: + self.assertFalse(seen_testfn) + self.assertTrue(seen_sub1) + self.assertTrue(seen_sub2) + self.assertEqual(sorted(dirnames), ["SUB1", "SUB2"]) + self.assertEqual(filenames, ["tmp1"]) + seen_testfn = True + elif path == self.sub1_path: + self.assertFalse(seen_testfn) + self.assertFalse(seen_sub1) + self.assertTrue(seen_sub11) + self.assertEqual(dirnames, ["SUB11"]) + self.assertEqual(filenames, ["tmp2"]) + seen_sub1 = True + elif path == self.sub11_path: + self.assertFalse(seen_sub1) + self.assertFalse(seen_sub11) + self.assertEqual(dirnames, []) + self.assertEqual(filenames, []) + seen_sub11 = True + elif path == self.sub2_path: + self.assertFalse(seen_testfn) + self.assertFalse(seen_sub2) + self.assertEqual(sorted(dirnames), sorted(self.sub2_tree[1])) + self.assertEqual(sorted(filenames), sorted(self.sub2_tree[2])) + seen_sub2 = True + else: + raise AssertionError(f"Unexpected path: {path}") + self.assertTrue(seen_testfn) + + def test_walk_follow_symlinks(self): + if not self.can_symlink: + self.skipTest("symlinks required") + self.setUpWalk() + walk_it = self.walk_path.walk(follow_symlinks=True) + for root, dirs, files in walk_it: + if root == self.link_path: + self.assertEqual(dirs, []) + self.assertEqual(files, ["tmp4"]) + break + else: + self.fail("Didn't follow symlink with follow_symlinks=True") + + def test_walk_symlink_location(self): + if not self.can_symlink: + self.skipTest("symlinks required") + self.setUpWalk() + # Tests whether symlinks end up in filenames or dirnames depending + # on the `follow_symlinks` argument. 
+ walk_it = self.walk_path.walk(follow_symlinks=False) + for root, dirs, files in walk_it: + if root == self.sub2_path: + self.assertIn("link", files) + break + else: + self.fail("symlink not found") + + walk_it = self.walk_path.walk(follow_symlinks=True) + for root, dirs, files in walk_it: + if root == self.sub2_path: + self.assertIn("link", dirs) + break + else: + self.fail("symlink not found") + + def test_walk_above_recursion_limit(self): + recursion_limit = 40 + # directory_depth > recursion_limit + directory_depth = recursion_limit + 10 + base = self.cls(self.base, 'deep') + path = base.joinpath(*(['d'] * directory_depth)) + path.mkdir(parents=True) + + with set_recursion_limit(recursion_limit): + list(base.walk()) + list(base.walk(top_down=False)) + +class DummyPathWithSymlinks(DummyPath): + def readlink(self): + path = str(self.parent.resolve() / self.name) + if path in self._symlinks: + return self.with_segments(self._symlinks[path]) + elif path in self._files or path in self._directories: + raise OSError(errno.EINVAL, "Not a symlink", path) + else: + raise FileNotFoundError(errno.ENOENT, "File not found", path) + + def symlink_to(self, target, target_is_directory=False): + self._directories[str(self.parent)].add(self.name) + self._symlinks[str(self)] = str(target) + + +class DummyPathWithSymlinksTest(DummyPathTest): + cls = DummyPathWithSymlinks + can_symlink = True + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py index 5eeea54fd55f1a..6d3228bf92f8ca 100644 --- a/Lib/test/test_subprocess.py +++ b/Lib/test/test_subprocess.py @@ -3348,6 +3348,7 @@ def exit_handler(): @unittest.skipIf(not sysconfig.get_config_var("HAVE_VFORK"), "vfork() not enabled by configure.") @mock.patch("subprocess._fork_exec") + @mock.patch("subprocess._USE_POSIX_SPAWN", new=False) def test__use_vfork(self, mock_fork_exec): self.assertTrue(subprocess._USE_VFORK) # The default value regardless. mock_fork_exec.side_effect = RuntimeError("just testing args") @@ -3366,9 +3367,13 @@ def test__use_vfork(self, mock_fork_exec): @unittest.skipIf(not sysconfig.get_config_var("HAVE_VFORK"), "vfork() not enabled by configure.") @unittest.skipIf(sys.platform != "linux", "Linux only, requires strace.") + @mock.patch("subprocess._USE_POSIX_SPAWN", new=False) def test_vfork_used_when_expected(self): # This is a performance regression test to ensure we default to using # vfork() when possible. + # Technically this test could pass when posix_spawn is used as well + # because libc tends to implement that internally using vfork. But + # that'd just be testing a libc+kernel implementation detail. strace_binary = "/usr/bin/strace" # The only system calls we are interested in. strace_filter = "--trace=clone,clone2,clone3,fork,vfork,exit,exit_group" diff --git a/Lib/test/test_wmi.py b/Lib/test/test_wmi.py index 3445702846d8a0..bf8c52e646dc18 100644 --- a/Lib/test/test_wmi.py +++ b/Lib/test/test_wmi.py @@ -1,17 +1,29 @@ # Test the internal _wmi module on Windows # This is used by the platform module, and potentially others +import time import unittest -from test.support import import_helper, requires_resource +from test.support import import_helper, requires_resource, LOOPBACK_TIMEOUT # Do this first so test will be skipped if module doesn't exist _wmi = import_helper.import_module('_wmi', required_on=['win']) +def wmi_exec_query(query): + # gh-112278: WMI maybe slow response when first call. 
+ try: + return _wmi.exec_query(query) + except WindowsError as e: + if e.winerror != 258: + raise + time.sleep(LOOPBACK_TIMEOUT) + return _wmi.exec_query(query) + + class WmiTests(unittest.TestCase): def test_wmi_query_os_version(self): - r = _wmi.exec_query("SELECT Version FROM Win32_OperatingSystem").split("\0") + r = wmi_exec_query("SELECT Version FROM Win32_OperatingSystem").split("\0") self.assertEqual(1, len(r)) k, eq, v = r[0].partition("=") self.assertEqual("=", eq, r[0]) @@ -28,7 +40,7 @@ def test_wmi_query_repeated(self): def test_wmi_query_error(self): # Invalid queries fail with OSError try: - _wmi.exec_query("SELECT InvalidColumnName FROM InvalidTableName") + wmi_exec_query("SELECT InvalidColumnName FROM InvalidTableName") except OSError as ex: if ex.winerror & 0xFFFFFFFF == 0x80041010: # This is the expected error code. All others should fail the test @@ -42,7 +54,7 @@ def test_wmi_query_repeated_error(self): def test_wmi_query_not_select(self): # Queries other than SELECT are blocked to avoid potential exploits with self.assertRaises(ValueError): - _wmi.exec_query("not select, just in case someone tries something") + wmi_exec_query("not select, just in case someone tries something") @requires_resource('cpu') def test_wmi_query_overflow(self): @@ -50,11 +62,11 @@ def test_wmi_query_overflow(self): # Test multiple times to ensure consistency for _ in range(2): with self.assertRaises(OSError): - _wmi.exec_query("SELECT * FROM CIM_DataFile") + wmi_exec_query("SELECT * FROM CIM_DataFile") def test_wmi_query_multiple_rows(self): # Multiple instances should have an extra null separator - r = _wmi.exec_query("SELECT ProcessId FROM Win32_Process WHERE ProcessId < 1000") + r = wmi_exec_query("SELECT ProcessId FROM Win32_Process WHERE ProcessId < 1000") self.assertFalse(r.startswith("\0"), r) self.assertFalse(r.endswith("\0"), r) it = iter(r.split("\0")) @@ -69,6 +81,6 @@ def test_wmi_query_threads(self): from concurrent.futures import ThreadPoolExecutor query = "SELECT ProcessId FROM Win32_Process WHERE ProcessId < 1000" with ThreadPoolExecutor(4) as pool: - task = [pool.submit(_wmi.exec_query, query) for _ in range(32)] + task = [pool.submit(wmi_exec_query, query) for _ in range(32)] for t in task: self.assertRegex(t.result(), "ProcessId=") diff --git a/Mac/IDLE/IDLE.app/Contents/Info.plist b/Mac/IDLE/IDLE.app/Contents/Info.plist index fea06d46324999..20b97b67f41d1a 100644 --- a/Mac/IDLE/IDLE.app/Contents/Info.plist +++ b/Mac/IDLE/IDLE.app/Contents/Info.plist @@ -56,5 +56,7 @@ %version% NSHighResolutionCapable + CFBundleAllowMixedLocalizations + diff --git a/Mac/Resources/app/Info.plist.in b/Mac/Resources/app/Info.plist.in index 4ec828ff176e7b..8362b19b361b62 100644 --- a/Mac/Resources/app/Info.plist.in +++ b/Mac/Resources/app/Info.plist.in @@ -58,5 +58,7 @@ (c) 2001-2023 Python Software Foundation. NSHighResolutionCapable + CFBundleAllowMixedLocalizations + diff --git a/Mac/Resources/framework/Info.plist.in b/Mac/Resources/framework/Info.plist.in index e131c205ef0b28..238441ce2c76c7 100644 --- a/Mac/Resources/framework/Info.plist.in +++ b/Mac/Resources/framework/Info.plist.in @@ -24,5 +24,7 @@ ???? 
CFBundleVersion %VERSION%
+ CFBundleAllowMixedLocalizations
+
diff --git a/Makefile.pre.in b/Makefile.pre.in
index 5fb6ffc4e8f0cf..195fc0ddddecd3 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -2272,6 +2272,7 @@ TESTSUBDIRS= idlelib/idle_test \
 		test/test_importlib/source \
 		test/test_json \
 		test/test_module \
+		test/test_pathlib \
 		test/test_peg_generator \
 		test/test_sqlite3 \
 		test/test_tkinter \
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-12-14-20-08-35.gh-issue-113054.e20CtM.rst b/Misc/NEWS.d/next/Core and Builtins/2023-12-14-20-08-35.gh-issue-113054.e20CtM.rst
new file mode 100644
index 00000000000000..d0729f9c44754c
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2023-12-14-20-08-35.gh-issue-113054.e20CtM.rst
@@ -0,0 +1,2 @@
+Fixed bug where a redundant NOP is not removed, causing an assertion to fail
+in the compiler in debug mode.
diff --git a/Misc/NEWS.d/next/Documentation/2023-10-23-23-43-43.gh-issue-110746.yg77IE.rst b/Misc/NEWS.d/next/Documentation/2023-10-23-23-43-43.gh-issue-110746.yg77IE.rst
new file mode 100644
index 00000000000000..215db7beb75dcf
--- /dev/null
+++ b/Misc/NEWS.d/next/Documentation/2023-10-23-23-43-43.gh-issue-110746.yg77IE.rst
@@ -0,0 +1 @@
+Improved markup for valid options/values for methods ttk.treeview.column and ttk.treeview.heading, and for Layouts.
diff --git a/Misc/NEWS.d/next/IDLE/2023-12-19-00-03-12.gh-issue-113269.lrU-IC.rst b/Misc/NEWS.d/next/IDLE/2023-12-19-00-03-12.gh-issue-113269.lrU-IC.rst
new file mode 100644
index 00000000000000..72e75b7910e359
--- /dev/null
+++ b/Misc/NEWS.d/next/IDLE/2023-12-19-00-03-12.gh-issue-113269.lrU-IC.rst
@@ -0,0 +1 @@
+Fix test_editor hang on macOS Catalina.
diff --git a/Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst b/Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst
new file mode 100644
index 00000000000000..3d0e9e4078c934
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst
@@ -0,0 +1,8 @@
+:func:`email.utils.getaddresses` and :func:`email.utils.parseaddr` now
+return ``('', '')`` 2-tuples in more situations where invalid email
+addresses are encountered instead of potentially inaccurate values. Add
+optional *strict* parameter to these two functions: use ``strict=False`` to
+get the old behavior, which accepts malformed inputs.
+``getattr(email.utils, 'supports_strict_parsing', False)`` can be used to check
+if the *strict* parameter is available. Patch by Thomas Dwyer and Victor
+Stinner to improve the CVE-2023-27043 fix.
diff --git a/Misc/NEWS.d/next/Library/2023-10-25-13-07-53.gh-issue-67790.jMn9Ad.rst b/Misc/NEWS.d/next/Library/2023-10-25-13-07-53.gh-issue-67790.jMn9Ad.rst
new file mode 100644
index 00000000000000..44c5702a6551b0
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-10-25-13-07-53.gh-issue-67790.jMn9Ad.rst
@@ -0,0 +1,2 @@
+Implement basic formatting support (minimum width, alignment, fill) for
+:class:`fractions.Fraction`.
diff --git a/Misc/NEWS.d/next/Library/2023-12-15-12-35-28.gh-issue-61648.G-4pz0.rst b/Misc/NEWS.d/next/Library/2023-12-15-12-35-28.gh-issue-61648.G-4pz0.rst
new file mode 100644
index 00000000000000..c841e5c7f7683a
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-12-15-12-35-28.gh-issue-61648.G-4pz0.rst
@@ -0,0 +1 @@
+Detect line numbers of properties in doctests.
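A brief usage sketch for two of the library entries above (the ``fractions.Fraction`` formatting support and the ``email.utils`` *strict* parameter). It is illustrative only and not part of the patch; it assumes an interpreter that already contains both changes, and the sample values are arbitrary:

    from fractions import Fraction
    import email.utils

    # Fill, alignment and minimum width now apply to Fraction:
    # right-align "1/7" in a field of 8 characters, filling with '*'.
    print(format(Fraction(1, 7), '*>8'))        # *****1/7

    # Feature-test for the keyword-only *strict* parameter before using it.
    headers = ['Alice <alice@example.com>, Bob <bob@example.com>']
    if getattr(email.utils, 'supports_strict_parsing', False):
        pairs = email.utils.getaddresses(headers, strict=False)
    else:
        pairs = email.utils.getaddresses(headers)
    print(pairs)   # [('Alice', 'alice@example.com'), ('Bob', 'bob@example.com')]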
diff --git a/Misc/NEWS.d/next/Library/2023-12-15-18-10-26.gh-issue-113202.xv_Ww8.rst b/Misc/NEWS.d/next/Library/2023-12-15-18-10-26.gh-issue-113202.xv_Ww8.rst new file mode 100644 index 00000000000000..44f26aef60a33a --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-12-15-18-10-26.gh-issue-113202.xv_Ww8.rst @@ -0,0 +1 @@ +Add a ``strict`` option to ``batched()`` in the ``itertools`` module. diff --git a/Misc/NEWS.d/next/Library/2023-12-15-18-13-59.gh-issue-113119.al-569.rst b/Misc/NEWS.d/next/Library/2023-12-15-18-13-59.gh-issue-113119.al-569.rst new file mode 100644 index 00000000000000..94087b00515e97 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-12-15-18-13-59.gh-issue-113119.al-569.rst @@ -0,0 +1,2 @@ +:func:`os.posix_spawn` now accepts ``env=None``, which makes the newly spawned +process use the current process environment. Patch by Jakub Kulik. diff --git a/Misc/NEWS.d/next/Library/2023-12-16-01-10-47.gh-issue-113199.oDjnjL.rst b/Misc/NEWS.d/next/Library/2023-12-16-01-10-47.gh-issue-113199.oDjnjL.rst new file mode 100644 index 00000000000000..d8e0b1731d1e3b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-12-16-01-10-47.gh-issue-113199.oDjnjL.rst @@ -0,0 +1,3 @@ +Make ``http.client.HTTPResponse.read1`` and +``http.client.HTTPResponse.readline`` close IO after reading all data when +content length is known. Patch by Illia Volochii. diff --git a/Misc/NEWS.d/next/Library/2023-12-16-10-58-34.gh-issue-113117.0zF7bH.rst b/Misc/NEWS.d/next/Library/2023-12-16-10-58-34.gh-issue-113117.0zF7bH.rst new file mode 100644 index 00000000000000..718226a0021efe --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-12-16-10-58-34.gh-issue-113117.0zF7bH.rst @@ -0,0 +1,4 @@ +The :mod:`subprocess` module can now use the :func:`os.posix_spawn` function +with ``close_fds=True`` on platforms where +``posix_spawn_file_actions_addclosefrom_np`` is available. +Patch by Jakub Kulik. diff --git a/Misc/NEWS.d/next/Library/2023-12-16-23-56-42.gh-issue-113149.7LWgTS.rst b/Misc/NEWS.d/next/Library/2023-12-16-23-56-42.gh-issue-113149.7LWgTS.rst new file mode 100644 index 00000000000000..0faa67fefabeca --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-12-16-23-56-42.gh-issue-113149.7LWgTS.rst @@ -0,0 +1,2 @@ +Improve error message when a JSON array or object contains a trailing comma. +Patch by Carson Radtke. diff --git a/Misc/NEWS.d/next/Library/2023-12-18-09-47-54.gh-issue-113246.em930H.rst b/Misc/NEWS.d/next/Library/2023-12-18-09-47-54.gh-issue-113246.em930H.rst new file mode 100644 index 00000000000000..167bb37c0e0643 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-12-18-09-47-54.gh-issue-113246.em930H.rst @@ -0,0 +1 @@ +Update bundled pip to 23.3.2. diff --git a/Misc/NEWS.d/next/Windows/2023-12-14-19-00-29.gh-issue-113009.6LNdjz.rst b/Misc/NEWS.d/next/Windows/2023-12-14-19-00-29.gh-issue-113009.6LNdjz.rst new file mode 100644 index 00000000000000..6fd7f7f9afdfa2 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2023-12-14-19-00-29.gh-issue-113009.6LNdjz.rst @@ -0,0 +1,5 @@ +:mod:`multiprocessing`: On Windows, fix a race condition in +``Process.terminate()``: no longer set the ``returncode`` attribute to +always call ``WaitForSingleObject()`` in ``Process.wait()``. Previously, +sometimes the process was still running after ``TerminateProcess()`` even if +``GetExitCodeProcess()`` is not ``STILL_ACTIVE``. Patch by Victor Stinner. 
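Similarly, a short sketch for the ``itertools.batched()`` *strict* option and the improved JSON trailing-comma message added above. Again this is illustrative rather than part of the patch; it assumes a build that includes both changes, and the exact wording of the error messages may differ:

    import json
    from itertools import batched

    print(list(batched('ABCDEFG', 3)))            # [('A', 'B', 'C'), ('D', 'E', 'F'), ('G',)]
    try:
        list(batched('ABCDEFG', 3, strict=True))  # final batch is shorter than n
    except ValueError as exc:
        print(exc)

    try:
        json.loads('[1, 2, 3,]')
    except json.JSONDecodeError as exc:
        print(exc)                                # now points at the trailing comma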
diff --git a/Misc/NEWS.d/next/macOS/2023-12-10-20-30-06.gh-issue-102362.y8svbF.rst b/Misc/NEWS.d/next/macOS/2023-12-10-20-30-06.gh-issue-102362.y8svbF.rst
new file mode 100644
index 00000000000000..55c5ac01434660
--- /dev/null
+++ b/Misc/NEWS.d/next/macOS/2023-12-10-20-30-06.gh-issue-102362.y8svbF.rst
@@ -0,0 +1,3 @@
+Make sure the result of :func:`sysconfig.get_platform` includes at least
+major and minor versions, even if ``MACOSX_DEPLOYMENT_TARGET`` is set to
+only a major version during build, to match the format expected by pip.
diff --git a/Misc/NEWS.d/next/macOS/2023-12-16-11-45-32.gh-issue-108269.wVgCHF.rst b/Misc/NEWS.d/next/macOS/2023-12-16-11-45-32.gh-issue-108269.wVgCHF.rst
new file mode 100644
index 00000000000000..85598454abcaad
--- /dev/null
+++ b/Misc/NEWS.d/next/macOS/2023-12-16-11-45-32.gh-issue-108269.wVgCHF.rst
@@ -0,0 +1,4 @@
+Set ``CFBundleAllowMixedLocalizations`` to true in the Info.plist for the
+framework, embedded Python.app and IDLE.app with framework installs on
+macOS. This allows applications to pick up the user's preferred locale when
+that's different from English.
diff --git a/Misc/sbom.spdx.json b/Misc/sbom.spdx.json
index 09355640db888e..81f8486ea350c1 100644
--- a/Misc/sbom.spdx.json
+++ b/Misc/sbom.spdx.json
@@ -1570,18 +1570,18 @@
       "fileName": "Modules/_decimal/libmpdec/vcdiv64.asm"
     },
     {
-      "SPDXID": "SPDXRef-FILE-Lib-ensurepip-bundled-pip-23.3.1-py3-none-any.whl",
+      "SPDXID": "SPDXRef-FILE-Lib-ensurepip-bundled-pip-23.3.2-py3-none-any.whl",
       "checksums": [
         {
           "algorithm": "SHA1",
-          "checksumValue": "4b2baddc0673f73017e531648a9ee27e47925e7a"
+          "checksumValue": "8e48f55ab2965ee64bd55cc91a8077d184a33e30"
         },
         {
           "algorithm": "SHA256",
-          "checksumValue": "55eb67bb6171d37447e82213be585b75fe2b12b359e993773aca4de9247a052b"
+          "checksumValue": "5052d7889c1f9d05224cd41741acb7c5d6fa735ab34e339624a614eaaa7e7d76"
         }
       ],
-      "fileName": "Lib/ensurepip/_bundled/pip-23.3.1-py3-none-any.whl"
+      "fileName": "Lib/ensurepip/_bundled/pip-23.3.2-py3-none-any.whl"
     }
   ],
   "packages": [
@@ -1703,16 +1703,16 @@
           "checksumValue": "7ccf472345f20d35bdc9d1841ff5f313260c2c33fe417f48c30ac46cccabf5be"
         }
       ],
-      "downloadLocation": "https://files.pythonhosted.org/packages/50/c2/e06851e8cc28dcad7c155f4753da8833ac06a5c704c109313b8d5a62968a/pip-23.2.1-py3-none-any.whl",
+      "downloadLocation": "https://files.pythonhosted.org/packages/15/aa/3f4c7bcee2057a76562a5b33ecbd199be08cdb4443a02e26bd2c3cf6fc39/pip-23.3.2-py3-none-any.whl",
       "externalRefs": [
         {
           "referenceCategory": "SECURITY",
-          "referenceLocator": "cpe:2.3:a:pypa:pip:23.2.1:*:*:*:*:*:*:*",
+          "referenceLocator": "cpe:2.3:a:pypa:pip:23.3.2:*:*:*:*:*:*:*",
           "referenceType": "cpe23Type"
         },
         {
           "referenceCategory": "PACKAGE_MANAGER",
-          "referenceLocator": "pkg:pypi/pip@23.2.1",
+          "referenceLocator": "pkg:pypi/pip@23.3.2",
           "referenceType": "purl"
         }
       ],
@@ -1720,7 +1720,7 @@
       "name": "pip",
       "originator": "Organization: Python Packaging Authority",
       "primaryPackagePurpose": "SOURCE",
-      "versionInfo": "23.2.1"
+      "versionInfo": "23.3.2"
     }
   ],
   "relationships": [
@@ -2285,7 +2285,7 @@
       "spdxElementId": "SPDXRef-PACKAGE-mpdecimal"
     },
     {
-      "relatedSpdxElement": "SPDXRef-FILE-Lib-ensurepip-bundled-pip-23.3.1-py3-none-any.whl",
+      "relatedSpdxElement": "SPDXRef-FILE-Lib-ensurepip-bundled-pip-23.3.2-py3-none-any.whl",
       "relationshipType": "CONTAINS",
       "spdxElementId": "SPDXRef-PACKAGE-pip"
     }
diff --git a/Modules/_json.c b/Modules/_json.c
index 0b1bfe34ad9304..24b292ce70e5eb 100644
--- a/Modules/_json.c
+++ b/Modules/_json.c
@@ -662,6 +662,7 @@
_parse_object_unicode(PyScannerObject *s, PyObject *memo, PyObject *pystr, Py_ss PyObject *key = NULL; int has_pairs_hook = (s->object_pairs_hook != Py_None); Py_ssize_t next_idx; + Py_ssize_t comma_idx; str = PyUnicode_DATA(pystr); kind = PyUnicode_KIND(pystr); @@ -741,10 +742,16 @@ _parse_object_unicode(PyScannerObject *s, PyObject *memo, PyObject *pystr, Py_ss raise_errmsg("Expecting ',' delimiter", pystr, idx); goto bail; } + comma_idx = idx; idx++; /* skip whitespace after , delimiter */ while (idx <= end_idx && IS_WHITESPACE(PyUnicode_READ(kind, str, idx))) idx++; + + if (idx <= end_idx && PyUnicode_READ(kind, str, idx) == '}') { + raise_errmsg("Illegal trailing comma before end of object", pystr, comma_idx); + goto bail; + } } } @@ -785,6 +792,7 @@ _parse_array_unicode(PyScannerObject *s, PyObject *memo, PyObject *pystr, Py_ssi PyObject *val = NULL; PyObject *rval; Py_ssize_t next_idx; + Py_ssize_t comma_idx; rval = PyList_New(0); if (rval == NULL) @@ -822,10 +830,16 @@ _parse_array_unicode(PyScannerObject *s, PyObject *memo, PyObject *pystr, Py_ssi raise_errmsg("Expecting ',' delimiter", pystr, idx); goto bail; } + comma_idx = idx; idx++; /* skip whitespace after , */ while (idx <= end_idx && IS_WHITESPACE(PyUnicode_READ(kind, str, idx))) idx++; + + if (idx <= end_idx && PyUnicode_READ(kind, str, idx) == ']') { + raise_errmsg("Illegal trailing comma before end of array", pystr, comma_idx); + goto bail; + } } } diff --git a/Modules/_testcapi/datetime.c b/Modules/_testcapi/datetime.c index 88f992915fa8c1..b1796039f0d83a 100644 --- a/Modules/_testcapi/datetime.c +++ b/Modules/_testcapi/datetime.c @@ -85,17 +85,25 @@ make_timezones_capi(PyObject *self, PyObject *args) { PyObject *offset = PyDelta_FromDSU(0, -18000, 0); PyObject *name = PyUnicode_FromString("EST"); + if (offset == NULL || name == NULL) { + Py_XDECREF(offset); + Py_XDECREF(name); + return NULL; + } PyObject *est_zone_capi = PyDateTimeAPI->TimeZone_FromTimeZone(offset, name); PyObject *est_zone_macro = PyTimeZone_FromOffsetAndName(offset, name); PyObject *est_zone_macro_noname = PyTimeZone_FromOffset(offset); - - Py_DecRef(offset); - Py_DecRef(name); - + Py_DECREF(offset); + Py_DECREF(name); + if (est_zone_capi == NULL || est_zone_macro == NULL || + est_zone_macro_noname == NULL) + { + goto error; + } PyObject *rv = PyTuple_New(3); if (rv == NULL) { - return NULL; + goto error; } PyTuple_SET_ITEM(rv, 0, est_zone_capi); @@ -103,6 +111,11 @@ make_timezones_capi(PyObject *self, PyObject *args) PyTuple_SET_ITEM(rv, 2, est_zone_macro_noname); return rv; +error: + Py_XDECREF(est_zone_capi); + Py_XDECREF(est_zone_macro); + Py_XDECREF(est_zone_macro_noname); + return NULL; } static PyObject * @@ -110,6 +123,11 @@ get_timezones_offset_zero(PyObject *self, PyObject *args) { PyObject *offset = PyDelta_FromDSU(0, 0, 0); PyObject *name = PyUnicode_FromString(""); + if (offset == NULL || name == NULL) { + Py_XDECREF(offset); + Py_XDECREF(name); + return NULL; + } // These two should return the UTC singleton PyObject *utc_singleton_0 = PyTimeZone_FromOffset(offset); @@ -117,16 +135,28 @@ get_timezones_offset_zero(PyObject *self, PyObject *args) // This one will return +00:00 zone, but not the UTC singleton PyObject *non_utc_zone = PyTimeZone_FromOffsetAndName(offset, name); - - Py_DecRef(offset); - Py_DecRef(name); + Py_DECREF(offset); + Py_DECREF(name); + if (utc_singleton_0 == NULL || utc_singleton_1 == NULL || + non_utc_zone == NULL) + { + goto error; + } PyObject *rv = PyTuple_New(3); + if (rv == NULL) { + goto error; + } 
PyTuple_SET_ITEM(rv, 0, utc_singleton_0); PyTuple_SET_ITEM(rv, 1, utc_singleton_1); PyTuple_SET_ITEM(rv, 2, non_utc_zone); return rv; +error: + Py_XDECREF(utc_singleton_0); + Py_XDECREF(utc_singleton_1); + Py_XDECREF(non_utc_zone); + return NULL; } static PyObject * diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index b3ddfae58e6fc0..3527dfa77279ac 100644 --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -196,11 +196,11 @@ test_dict_inner(int count) for (i = 0; i < count; i++) { v = PyLong_FromLong(i); if (v == NULL) { - return -1; + goto error; } if (PyDict_SetItem(dict, v, v) < 0) { Py_DECREF(v); - return -1; + goto error; } Py_DECREF(v); } @@ -214,11 +214,12 @@ test_dict_inner(int count) assert(v != UNINITIALIZED_PTR); i = PyLong_AS_LONG(v) + 1; o = PyLong_FromLong(i); - if (o == NULL) - return -1; + if (o == NULL) { + goto error; + } if (PyDict_SetItem(dict, k, o) < 0) { Py_DECREF(o); - return -1; + goto error; } Py_DECREF(o); k = v = UNINITIALIZED_PTR; @@ -236,6 +237,9 @@ test_dict_inner(int count) } else { return 0; } +error: + Py_DECREF(dict); + return -1; } @@ -1556,7 +1560,9 @@ test_structseq_newtype_doesnt_leak(PyObject *Py_UNUSED(self), descr.n_in_sequence = 1; PyTypeObject* structseq_type = PyStructSequence_NewType(&descr); - assert(structseq_type != NULL); + if (structseq_type == NULL) { + return NULL; + } assert(PyType_Check(structseq_type)); assert(PyType_FastSubclass(structseq_type, Py_TPFLAGS_TUPLE_SUBCLASS)); Py_DECREF(structseq_type); diff --git a/Modules/_testinternalcapi/test_lock.c b/Modules/_testinternalcapi/test_lock.c index 418f71c1441995..83081f73a72f64 100644 --- a/Modules/_testinternalcapi/test_lock.c +++ b/Modules/_testinternalcapi/test_lock.c @@ -372,6 +372,104 @@ test_lock_once(PyObject *self, PyObject *obj) Py_RETURN_NONE; } +struct test_rwlock_data { + Py_ssize_t nthreads; + _PyRWMutex rw; + PyEvent step1; + PyEvent step2; + PyEvent step3; + PyEvent done; +}; + +static void +rdlock_thread(void *arg) +{ + struct test_rwlock_data *test_data = arg; + + // Acquire the lock in read mode + _PyRWMutex_RLock(&test_data->rw); + PyEvent_Wait(&test_data->step1); + _PyRWMutex_RUnlock(&test_data->rw); + + _PyRWMutex_RLock(&test_data->rw); + PyEvent_Wait(&test_data->step3); + _PyRWMutex_RUnlock(&test_data->rw); + + if (_Py_atomic_add_ssize(&test_data->nthreads, -1) == 1) { + _PyEvent_Notify(&test_data->done); + } +} +static void +wrlock_thread(void *arg) +{ + struct test_rwlock_data *test_data = arg; + + // First acquire the lock in write mode + _PyRWMutex_Lock(&test_data->rw); + PyEvent_Wait(&test_data->step2); + _PyRWMutex_Unlock(&test_data->rw); + + if (_Py_atomic_add_ssize(&test_data->nthreads, -1) == 1) { + _PyEvent_Notify(&test_data->done); + } +} + +static void +wait_until(uintptr_t *ptr, uintptr_t value) +{ + // wait up to two seconds for *ptr == value + int iters = 0; + uintptr_t bits; + do { + pysleep(10); + bits = _Py_atomic_load_uintptr(ptr); + iters++; + } while (bits != value && iters < 200); +} + +static PyObject * +test_lock_rwlock(PyObject *self, PyObject *obj) +{ + struct test_rwlock_data test_data = {.nthreads = 3}; + + _PyRWMutex_Lock(&test_data.rw); + assert(test_data.rw.bits == 1); + + _PyRWMutex_Unlock(&test_data.rw); + assert(test_data.rw.bits == 0); + + // Start two readers + PyThread_start_new_thread(rdlock_thread, &test_data); + PyThread_start_new_thread(rdlock_thread, &test_data); + + // wait up to two seconds for the threads to attempt to read-lock "rw" + wait_until(&test_data.rw.bits, 8); + 
assert(test_data.rw.bits == 8); + + // start writer (while readers hold lock) + PyThread_start_new_thread(wrlock_thread, &test_data); + wait_until(&test_data.rw.bits, 10); + assert(test_data.rw.bits == 10); + + // readers release lock, writer should acquire it + _PyEvent_Notify(&test_data.step1); + wait_until(&test_data.rw.bits, 3); + assert(test_data.rw.bits == 3); + + // writer releases lock, readers acquire it + _PyEvent_Notify(&test_data.step2); + wait_until(&test_data.rw.bits, 8); + assert(test_data.rw.bits == 8); + + // readers release lock again + _PyEvent_Notify(&test_data.step3); + wait_until(&test_data.rw.bits, 0); + assert(test_data.rw.bits == 0); + + PyEvent_Wait(&test_data.done); + Py_RETURN_NONE; +} + static PyMethodDef test_methods[] = { {"test_lock_basic", test_lock_basic, METH_NOARGS}, {"test_lock_two_threads", test_lock_two_threads, METH_NOARGS}, @@ -380,6 +478,7 @@ static PyMethodDef test_methods[] = { _TESTINTERNALCAPI_BENCHMARK_LOCKS_METHODDEF {"test_lock_benchmark", test_lock_benchmark, METH_NOARGS}, {"test_lock_once", test_lock_once, METH_NOARGS}, + {"test_lock_rwlock", test_lock_rwlock, METH_NOARGS}, {NULL, NULL} /* sentinel */ }; diff --git a/Modules/_xxinterpqueuesmodule.c b/Modules/_xxinterpqueuesmodule.c index 2cc3a2ac5dc85f..537ba9188055dd 100644 --- a/Modules/_xxinterpqueuesmodule.c +++ b/Modules/_xxinterpqueuesmodule.c @@ -234,11 +234,13 @@ static int add_exctype(PyObject *mod, PyObject **p_state_field, const char *qualname, const char *doc, PyObject *base) { +#ifndef NDEBUG const char *dot = strrchr(qualname, '.'); assert(dot != NULL); const char *name = dot+1; assert(*p_state_field == NULL); assert(!PyObject_HasAttrStringWithError(mod, name)); +#endif PyObject *exctype = PyErr_NewExceptionWithDoc(qualname, doc, base, NULL); if (exctype == NULL) { return -1; @@ -1505,7 +1507,7 @@ queuesmod_is_full(PyObject *self, PyObject *args, PyObject *kwds) } int64_t qid = qidarg.id; - int is_full; + int is_full = 0; int err = queue_is_full(&_globals.queues, qid, &is_full); if (handle_queue_error(err, self, qid)) { return NULL; diff --git a/Modules/clinic/itertoolsmodule.c.h b/Modules/clinic/itertoolsmodule.c.h index fa2c5e0e922387..3ec479943a83d4 100644 --- a/Modules/clinic/itertoolsmodule.c.h +++ b/Modules/clinic/itertoolsmodule.c.h @@ -10,7 +10,7 @@ preserve #include "pycore_modsupport.h" // _PyArg_UnpackKeywords() PyDoc_STRVAR(batched_new__doc__, -"batched(iterable, n)\n" +"batched(iterable, n, *, strict=False)\n" "--\n" "\n" "Batch data into tuples of length n. 
The last batch may be shorter than n.\n" @@ -25,10 +25,14 @@ PyDoc_STRVAR(batched_new__doc__, " ...\n" " (\'A\', \'B\', \'C\')\n" " (\'D\', \'E\', \'F\')\n" -" (\'G\',)"); +" (\'G\',)\n" +"\n" +"If \"strict\" is True, raises a ValueError if the final batch is shorter\n" +"than n."); static PyObject * -batched_new_impl(PyTypeObject *type, PyObject *iterable, Py_ssize_t n); +batched_new_impl(PyTypeObject *type, PyObject *iterable, Py_ssize_t n, + int strict); static PyObject * batched_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) @@ -36,14 +40,14 @@ batched_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) - #define NUM_KEYWORDS 2 + #define NUM_KEYWORDS 3 static struct { PyGC_Head _this_is_not_used; PyObject_VAR_HEAD PyObject *ob_item[NUM_KEYWORDS]; } _kwtuple = { .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) - .ob_item = { &_Py_ID(iterable), &_Py_ID(n), }, + .ob_item = { &_Py_ID(iterable), &_Py_ID(n), &_Py_ID(strict), }, }; #undef NUM_KEYWORDS #define KWTUPLE (&_kwtuple.ob_base.ob_base) @@ -52,18 +56,20 @@ batched_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) # define KWTUPLE NULL #endif // !Py_BUILD_CORE - static const char * const _keywords[] = {"iterable", "n", NULL}; + static const char * const _keywords[] = {"iterable", "n", "strict", NULL}; static _PyArg_Parser _parser = { .keywords = _keywords, .fname = "batched", .kwtuple = KWTUPLE, }; #undef KWTUPLE - PyObject *argsbuf[2]; + PyObject *argsbuf[3]; PyObject * const *fastargs; Py_ssize_t nargs = PyTuple_GET_SIZE(args); + Py_ssize_t noptargs = nargs + (kwargs ? PyDict_GET_SIZE(kwargs) : 0) - 2; PyObject *iterable; Py_ssize_t n; + int strict = 0; fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser, 2, 2, 0, argsbuf); if (!fastargs) { @@ -82,7 +88,15 @@ batched_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) } n = ival; } - return_value = batched_new_impl(type, iterable, n); + if (!noptargs) { + goto skip_optional_kwonly; + } + strict = PyObject_IsTrue(fastargs[2]); + if (strict < 0) { + goto exit; + } +skip_optional_kwonly: + return_value = batched_new_impl(type, iterable, n, strict); exit: return return_value; @@ -914,4 +928,4 @@ itertools_count(PyTypeObject *type, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=782fe7e30733779b input=a9049054013a1b77]*/ +/*[clinic end generated code: output=c6a515f765da86b5 input=a9049054013a1b77]*/ diff --git a/Modules/getpath.c b/Modules/getpath.c index 422056b1fb6de4..a3c8fc269d1c3c 100644 --- a/Modules/getpath.c +++ b/Modules/getpath.c @@ -22,7 +22,7 @@ #endif /* Reference the precompiled getpath.py */ -#include "../Python/frozen_modules/getpath.h" +#include "Python/frozen_modules/getpath.h" #if (!defined(PREFIX) || !defined(EXEC_PREFIX) \ || !defined(VERSION) || !defined(VPATH) \ @@ -506,12 +506,17 @@ getpath_realpath(PyObject *Py_UNUSED(self) , PyObject *args) HANDLE hFile; wchar_t resolved[MAXPATHLEN+1]; int len = 0, err; + Py_ssize_t pathlen; PyObject *result; - wchar_t *path = PyUnicode_AsWideCharString(pathobj, NULL); + wchar_t *path = PyUnicode_AsWideCharString(pathobj, &pathlen); if (!path) { return NULL; } + if (wcslen(path) != pathlen) { + PyErr_SetString(PyExc_ValueError, "path contains embedded nulls"); + return NULL; + } Py_BEGIN_ALLOW_THREADS hFile = CreateFileW(path, 0, 0, NULL, OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL); @@ -535,7 
+540,11 @@ getpath_realpath(PyObject *Py_UNUSED(self) , PyObject *args) len -= 4; } } - result = PyUnicode_FromWideChar(p, len); + if (CompareStringOrdinal(path, (int)pathlen, p, len, TRUE) == CSTR_EQUAL) { + result = Py_NewRef(pathobj); + } else { + result = PyUnicode_FromWideChar(p, len); + } } else { result = Py_NewRef(pathobj); } diff --git a/Modules/itertoolsmodule.c b/Modules/itertoolsmodule.c index ab99fa4d873bf5..164741495c7baf 100644 --- a/Modules/itertoolsmodule.c +++ b/Modules/itertoolsmodule.c @@ -105,20 +105,11 @@ class itertools.pairwise "pairwiseobject *" "clinic_state()->pairwise_type" /* batched object ************************************************************/ -/* Note: The built-in zip() function includes a "strict" argument - that was needed because that function would silently truncate data, - and there was no easy way for a user to detect the data loss. - The same reasoning does not apply to batched() which never drops data. - Instead, batched() produces a shorter tuple which can be handled - as the user sees fit. If requested, it would be reasonable to add - "fillvalue" support which had demonstrated value in zip_longest(). - For now, the API is kept simple and clean. - */ - typedef struct { PyObject_HEAD PyObject *it; Py_ssize_t batch_size; + bool strict; } batchedobject; /*[clinic input] @@ -126,6 +117,9 @@ typedef struct { itertools.batched.__new__ as batched_new iterable: object n: Py_ssize_t + * + strict: bool = False + Batch data into tuples of length n. The last batch may be shorter than n. Loops over the input iterable and accumulates data into tuples @@ -140,11 +134,15 @@ or when the input iterable is exhausted. ('D', 'E', 'F') ('G',) +If "strict" is True, raises a ValueError if the final batch is shorter +than n. + [clinic start generated code]*/ static PyObject * -batched_new_impl(PyTypeObject *type, PyObject *iterable, Py_ssize_t n) -/*[clinic end generated code: output=7ebc954d655371b6 input=ffd70726927c5129]*/ +batched_new_impl(PyTypeObject *type, PyObject *iterable, Py_ssize_t n, + int strict) +/*[clinic end generated code: output=c6de11b061529d3e input=7814b47e222f5467]*/ { PyObject *it; batchedobject *bo; @@ -170,6 +168,7 @@ batched_new_impl(PyTypeObject *type, PyObject *iterable, Py_ssize_t n) } bo->batch_size = n; bo->it = it; + bo->strict = (bool) strict; return (PyObject *)bo; } @@ -233,6 +232,12 @@ batched_next(batchedobject *bo) Py_DECREF(result); return NULL; } + if (bo->strict) { + Py_CLEAR(bo->it); + Py_DECREF(result); + PyErr_SetString(PyExc_ValueError, "batched(): incomplete batch"); + return NULL; + } _PyTuple_Resize(&result, i); return result; } diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index b464a28e63b8ac..c7ee591f30c51f 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -1557,6 +1557,7 @@ _Py_Sigset_Converter(PyObject *obj, void *addr) ** man environ(7). */ #include +#define USE_DARWIN_NS_GET_ENVIRON 1 #elif !defined(_MSC_VER) && (!defined(__WATCOMC__) || defined(__QNX__) || defined(__VXWORKS__)) extern char **environ; #endif /* !_MSC_VER */ @@ -1579,7 +1580,7 @@ convertenviron(void) through main() instead of wmain(). */ (void)_wgetenv(L""); e = _wenviron; -#elif defined(WITH_NEXT_FRAMEWORK) || (defined(__APPLE__) && defined(Py_ENABLE_SHARED)) +#elif defined(USE_DARWIN_NS_GET_ENVIRON) /* environ is not accessible as an extern in a shared object on OSX; use _NSGetEnviron to resolve it. The value changes if you add environment variables between calls to Py_Initialize, so don't cache the value. 
*/ @@ -6834,6 +6835,9 @@ enum posix_spawn_file_actions_identifier { POSIX_SPAWN_OPEN, POSIX_SPAWN_CLOSE, POSIX_SPAWN_DUP2 +#ifdef HAVE_POSIX_SPAWN_FILE_ACTIONS_ADDCLOSEFROM_NP + ,POSIX_SPAWN_CLOSEFROM +#endif }; #if defined(HAVE_SCHED_SETPARAM) || defined(HAVE_SCHED_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDPARAM) @@ -7074,6 +7078,24 @@ parse_file_actions(PyObject *file_actions, } break; } +#ifdef HAVE_POSIX_SPAWN_FILE_ACTIONS_ADDCLOSEFROM_NP + case POSIX_SPAWN_CLOSEFROM: { + int fd; + if (!PyArg_ParseTuple(file_action, "Oi" + ";A closefrom file_action tuple must have 2 elements", + &tag_obj, &fd)) + { + goto fail; + } + errno = posix_spawn_file_actions_addclosefrom_np(file_actionsp, + fd); + if (errno) { + posix_error(); + goto fail; + } + break; + } +#endif default: { PyErr_SetString(PyExc_TypeError, "Unknown file_actions identifier"); @@ -7129,9 +7151,9 @@ py_posix_spawn(int use_posix_spawnp, PyObject *module, path_t *path, PyObject *a return NULL; } - if (!PyMapping_Check(env)) { + if (!PyMapping_Check(env) && env != Py_None) { PyErr_Format(PyExc_TypeError, - "%s: environment must be a mapping object", func_name); + "%s: environment must be a mapping object or None", func_name); goto exit; } @@ -7145,9 +7167,21 @@ py_posix_spawn(int use_posix_spawnp, PyObject *module, path_t *path, PyObject *a goto exit; } - envlist = parse_envlist(env, &envc); - if (envlist == NULL) { - goto exit; +#ifdef USE_DARWIN_NS_GET_ENVIRON + // There is no environ global in this situation. + char **environ = NULL; +#endif + + if (env == Py_None) { +#ifdef USE_DARWIN_NS_GET_ENVIRON + environ = *_NSGetEnviron(); +#endif + envlist = environ; + } else { + envlist = parse_envlist(env, &envc); + if (envlist == NULL) { + goto exit; + } } if (file_actions != NULL && file_actions != Py_None) { @@ -7210,7 +7244,7 @@ py_posix_spawn(int use_posix_spawnp, PyObject *module, path_t *path, PyObject *a if (attrp) { (void)posix_spawnattr_destroy(attrp); } - if (envlist) { + if (envlist && envlist != environ) { free_string_array(envlist, envc); } if (argvlist) { @@ -16770,6 +16804,9 @@ all_ins(PyObject *m) if (PyModule_AddIntConstant(m, "POSIX_SPAWN_OPEN", POSIX_SPAWN_OPEN)) return -1; if (PyModule_AddIntConstant(m, "POSIX_SPAWN_CLOSE", POSIX_SPAWN_CLOSE)) return -1; if (PyModule_AddIntConstant(m, "POSIX_SPAWN_DUP2", POSIX_SPAWN_DUP2)) return -1; +#ifdef HAVE_POSIX_SPAWN_FILE_ACTIONS_ADDCLOSEFROM_NP + if (PyModule_AddIntMacro(m, POSIX_SPAWN_CLOSEFROM)) return -1; +#endif #endif #if defined(HAVE_SPAWNV) || defined (HAVE_RTPSPAWN) diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index 90aa8cf28f8c5d..c90ad1a3592f67 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -120,6 +120,7 @@ PLATLIBDIR="DLLs"; %(PreprocessorDefinitions) + $(PySourcePath);%(AdditionalIncludeDirectories) diff --git a/Python/bytecodes.c b/Python/bytecodes.c index 68bb15c2b536eb..19e2268046fcdc 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -506,6 +506,7 @@ dummy_func( // specializations, but there is no output. // At the end we just skip over the STORE_FAST. op(_BINARY_OP_INPLACE_ADD_UNICODE, (unused/1, left, right --)) { + TIER_ONE_ONLY assert(next_instr->op.code == STORE_FAST); PyObject **target_local = &GETLOCAL(next_instr->op.arg); DEOPT_IF(*target_local != left); @@ -786,6 +787,7 @@ dummy_func( } inst(INTERPRETER_EXIT, (retval --)) { + TIER_ONE_ONLY assert(frame == &entry_frame); assert(_PyFrame_IsIncomplete(frame)); /* Restore previous frame and return. 
*/ @@ -1072,6 +1074,7 @@ dummy_func( } inst(YIELD_VALUE, (retval -- unused)) { + TIER_ONE_ONLY // NOTE: It's important that YIELD_VALUE never raises an exception! // The compiler treats any exception raised here as a failed close() // or throw() call. @@ -2297,6 +2300,7 @@ dummy_func( } inst(JUMP_FORWARD, (--)) { + TIER_ONE_ONLY JUMPBY(oparg); } @@ -2402,6 +2406,7 @@ dummy_func( macro(POP_JUMP_IF_NOT_NONE) = _IS_NONE + _POP_JUMP_IF_FALSE; inst(JUMP_BACKWARD_NO_INTERRUPT, (--)) { + TIER_ONE_ONLY /* This bytecode is used in the `yield from` or `await` loop. * If there is an interrupt, we want it handled in the innermost * generator or coroutine, so we deliberately do not check it here. @@ -3454,6 +3459,7 @@ dummy_func( // This is secretly a super-instruction inst(CALL_LIST_APPEND, (unused/1, unused/2, callable, self, args[oparg] -- unused)) { + TIER_ONE_ONLY assert(oparg == 1); PyInterpreterState *interp = tstate->interp; DEOPT_IF(callable != interp->callable_cache.list_append); @@ -3792,6 +3798,7 @@ dummy_func( } inst(RETURN_GENERATOR, (--)) { + TIER_ONE_ONLY assert(PyFunction_Check(frame->f_funcobj)); PyFunctionObject *func = (PyFunctionObject *)frame->f_funcobj; PyGenObject *gen = (PyGenObject *)_Py_MakeCoro(func); diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h index 2519a4ee546a5e..7cc29c8e644d8d 100644 --- a/Python/executor_cases.c.h +++ b/Python/executor_cases.c.h @@ -375,38 +375,6 @@ break; } - case _BINARY_OP_INPLACE_ADD_UNICODE: { - PyObject *right; - PyObject *left; - right = stack_pointer[-1]; - left = stack_pointer[-2]; - assert(next_instr->op.code == STORE_FAST); - PyObject **target_local = &GETLOCAL(next_instr->op.arg); - if (*target_local != left) goto deoptimize; - STAT_INC(BINARY_OP, hit); - /* Handle `left = left + right` or `left += right` for str. - * - * When possible, extend `left` in place rather than - * allocating a new PyUnicodeObject. This attempts to avoid - * quadratic behavior when one neglects to use str.join(). - * - * If `left` has only two references remaining (one from - * the stack, one in the locals), DECREFing `left` leaves - * only the locals reference, so PyUnicode_Append knows - * that the string is safe to mutate. - */ - assert(Py_REFCNT(left) >= 2); - _Py_DECREF_NO_DEALLOC(left); - PyUnicode_Append(target_local, right); - _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc); - if (*target_local == NULL) goto pop_2_error_tier_two; - // The STORE_FAST is already done. - assert(next_instr->op.code == STORE_FAST); - SKIP_OVER(1); - stack_pointer += -2; - break; - } - case _BINARY_SUBSCR: { PyObject *sub; PyObject *container; @@ -690,18 +658,6 @@ break; } - case _INTERPRETER_EXIT: { - PyObject *retval; - retval = stack_pointer[-1]; - assert(frame == &entry_frame); - assert(_PyFrame_IsIncomplete(frame)); - /* Restore previous frame and return. */ - tstate->current_frame = frame->previous; - assert(!_PyErr_Occurred(tstate)); - tstate->c_recursion_remaining += PY_EVAL_C_STACK_UNITS; - return retval; - } - case _POP_FRAME: { PyObject *retval; retval = stack_pointer[-1]; @@ -846,33 +802,6 @@ /* _INSTRUMENTED_YIELD_VALUE is not a viable micro-op for tier 2 */ - case _YIELD_VALUE: { - PyObject *retval; - oparg = CURRENT_OPARG(); - retval = stack_pointer[-1]; - // NOTE: It's important that YIELD_VALUE never raises an exception! - // The compiler treats any exception raised here as a failed close() - // or throw() call. 
- assert(frame != &entry_frame); - frame->instr_ptr = next_instr; - PyGenObject *gen = _PyFrame_GetGenerator(frame); - assert(FRAME_SUSPENDED_YIELD_FROM == FRAME_SUSPENDED + 1); - assert(oparg == 0 || oparg == 1); - gen->gi_frame_state = FRAME_SUSPENDED + oparg; - _PyFrame_SetStackPointer(frame, stack_pointer - 1); - tstate->exc_info = gen->gi_exc_state.previous_item; - gen->gi_exc_state.previous_item = NULL; - _Py_LeaveRecursiveCallPy(tstate); - _PyInterpreterFrame *gen_frame = frame; - frame = tstate->current_frame = frame->previous; - gen_frame->previous = NULL; - _PyFrame_StackPush(frame, retval); - /* We don't know which of these is relevant here, so keep them equal */ - assert(INLINE_CACHE_ENTRIES_SEND == INLINE_CACHE_ENTRIES_FOR_ITER); - LOAD_IP(1 + INLINE_CACHE_ENTRIES_SEND); - goto resume_frame; - } - case _POP_EXCEPT: { PyObject *exc_value; exc_value = stack_pointer[-1]; @@ -1197,7 +1126,7 @@ } null = NULL; stack_pointer[0] = res; - if (oparg & 1) stack_pointer[1] = null; + if (oparg & 1) stack_pointer[1] = null; stack_pointer += 1 + ((oparg & 1)); break; } @@ -1233,7 +1162,7 @@ STAT_INC(LOAD_GLOBAL, hit); null = NULL; stack_pointer[0] = res; - if (oparg & 1) stack_pointer[1] = null; + if (oparg & 1) stack_pointer[1] = null; stack_pointer += 1 + ((oparg & 1)); break; } @@ -1251,7 +1180,7 @@ STAT_INC(LOAD_GLOBAL, hit); null = NULL; stack_pointer[0] = res; - if (oparg & 1) stack_pointer[1] = null; + if (oparg & 1) stack_pointer[1] = null; stack_pointer += 1 + ((oparg & 1)); break; } @@ -1683,7 +1612,7 @@ if (attr == NULL) goto pop_1_error_tier_two; } stack_pointer[-1] = attr; - if (oparg & 1) stack_pointer[0] = self_or_null; + if (oparg & 1) stack_pointer[0] = self_or_null; stack_pointer += ((oparg & 1)); break; } @@ -1723,7 +1652,7 @@ null = NULL; Py_DECREF(owner); stack_pointer[-1] = attr; - if (oparg & 1) stack_pointer[0] = null; + if (oparg & 1) stack_pointer[0] = null; stack_pointer += ((oparg & 1)); break; } @@ -1757,7 +1686,7 @@ null = NULL; Py_DECREF(owner); stack_pointer[-1] = attr; - if (oparg & 1) stack_pointer[0] = null; + if (oparg & 1) stack_pointer[0] = null; stack_pointer += ((oparg & 1)); break; } @@ -1801,7 +1730,7 @@ null = NULL; Py_DECREF(owner); stack_pointer[-1] = attr; - if (oparg & 1) stack_pointer[0] = null; + if (oparg & 1) stack_pointer[0] = null; stack_pointer += ((oparg & 1)); break; } @@ -1821,7 +1750,7 @@ null = NULL; Py_DECREF(owner); stack_pointer[-1] = attr; - if (oparg & 1) stack_pointer[0] = null; + if (oparg & 1) stack_pointer[0] = null; stack_pointer += ((oparg & 1)); break; } @@ -1849,7 +1778,7 @@ null = NULL; Py_DECREF(owner); stack_pointer[-1] = attr; - if (oparg & 1) stack_pointer[0] = null; + if (oparg & 1) stack_pointer[0] = null; stack_pointer += ((oparg & 1)); break; } @@ -2084,12 +2013,6 @@ break; } - case _JUMP_FORWARD: { - oparg = CURRENT_OPARG(); - JUMPBY(oparg); - break; - } - /* _JUMP_BACKWARD is not a viable micro-op for tier 2 */ /* _POP_JUMP_IF_FALSE is not a viable micro-op for tier 2 */ @@ -2111,17 +2034,6 @@ break; } - case _JUMP_BACKWARD_NO_INTERRUPT: { - oparg = CURRENT_OPARG(); - /* This bytecode is used in the `yield from` or `await` loop. - * If there is an interrupt, we want it handled in the innermost - * generator or coroutine, so we deliberately do not check it here. - * (see bpo-30039). 
- */ - JUMPBY(-oparg); - break; - } - case _GET_LEN: { PyObject *obj; PyObject *len_o; @@ -2555,7 +2467,7 @@ assert(_PyType_HasFeature(Py_TYPE(attr), Py_TPFLAGS_METHOD_DESCRIPTOR)); self = owner; stack_pointer[-1] = attr; - if (1) stack_pointer[0] = self; + if (1) stack_pointer[0] = self; stack_pointer += (((1) ? 1 : 0)); break; } @@ -2575,7 +2487,7 @@ attr = Py_NewRef(descr); self = owner; stack_pointer[-1] = attr; - if (1) stack_pointer[0] = self; + if (1) stack_pointer[0] = self; stack_pointer += (((1) ? 1 : 0)); break; } @@ -2638,7 +2550,7 @@ attr = Py_NewRef(descr); self = owner; stack_pointer[-1] = attr; - if (1) stack_pointer[0] = self; + if (1) stack_pointer[0] = self; stack_pointer += (((1) ? 1 : 0)); break; } @@ -3060,32 +2972,6 @@ break; } - case _CALL_LIST_APPEND: { - PyObject **args; - PyObject *self; - PyObject *callable; - oparg = CURRENT_OPARG(); - args = &stack_pointer[-oparg]; - self = stack_pointer[-1 - oparg]; - callable = stack_pointer[-2 - oparg]; - assert(oparg == 1); - PyInterpreterState *interp = tstate->interp; - if (callable != interp->callable_cache.list_append) goto deoptimize; - assert(self != NULL); - if (!PyList_Check(self)) goto deoptimize; - STAT_INC(CALL, hit); - if (_PyList_AppendTakeRef((PyListObject *)self, args[0]) < 0) { - goto pop_1_error; // Since arg is DECREF'ed already - } - Py_DECREF(self); - Py_DECREF(callable); - STACK_SHRINK(3); - // Skip POP_TOP - assert(next_instr->op.code == POP_TOP); - SKIP_OVER(1); - DISPATCH(); - } - case _CALL_METHOD_DESCRIPTOR_O: { PyObject **args; PyObject *self_or_null; @@ -3307,38 +3193,13 @@ break; } - case _RETURN_GENERATOR: { - assert(PyFunction_Check(frame->f_funcobj)); - PyFunctionObject *func = (PyFunctionObject *)frame->f_funcobj; - PyGenObject *gen = (PyGenObject *)_Py_MakeCoro(func); - if (gen == NULL) { - GOTO_ERROR(error); - } - assert(EMPTY()); - _PyFrame_SetStackPointer(frame, stack_pointer); - _PyInterpreterFrame *gen_frame = (_PyInterpreterFrame *)gen->gi_iframe; - frame->instr_ptr = next_instr; - _PyFrame_Copy(frame, gen_frame); - assert(frame->frame_obj == NULL); - gen->gi_frame_state = FRAME_CREATED; - gen_frame->owner = FRAME_OWNED_BY_GENERATOR; - _Py_LeaveRecursiveCallPy(tstate); - assert(frame != &entry_frame); - _PyInterpreterFrame *prev = frame->previous; - _PyThreadState_PopFrame(tstate, frame); - frame = tstate->current_frame = prev; - _PyFrame_StackPush(frame, (PyObject *)gen); - LOAD_IP(frame->return_offset); - goto resume_frame; - } - case _BUILD_SLICE: { PyObject *step = NULL; PyObject *stop; PyObject *start; PyObject *slice; oparg = CURRENT_OPARG(); - if (oparg == 3) { step = stack_pointer[-(((oparg == 3) ? 1 : 0))]; } + if (oparg == 3) { step = stack_pointer[-(((oparg == 3) ? 1 : 0))]; } stop = stack_pointer[-1 - (((oparg == 3) ? 1 : 0))]; start = stack_pointer[-2 - (((oparg == 3) ? 1 : 0))]; slice = PySlice_New(start, stop, step); diff --git a/Python/flowgraph.c b/Python/flowgraph.c index fe632082d5a66c..d2e3a7ae441c7f 100644 --- a/Python/flowgraph.c +++ b/Python/flowgraph.c @@ -1110,7 +1110,10 @@ remove_redundant_jumps(cfg_builder *g) { * of that jump. If it is, then the jump instruction is redundant and * can be deleted. 
*/ + assert(no_empty_basic_blocks(g)); + + bool remove_empty_blocks = false; for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { cfg_instr *last = basicblock_last_instr(b); assert(last != NULL); @@ -1122,10 +1125,22 @@ remove_redundant_jumps(cfg_builder *g) { } if (last->i_target == b->b_next) { assert(b->b_next->b_iused); - INSTR_SET_OP0(last, NOP); + if (last->i_loc.lineno == NO_LOCATION.lineno) { + b->b_iused--; + if (b->b_iused == 0) { + remove_empty_blocks = true; + } + } + else { + INSTR_SET_OP0(last, NOP); + } } } } + if (remove_empty_blocks) { + eliminate_empty_basic_blocks(g); + } + assert(no_empty_basic_blocks(g)); return SUCCESS; } diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index 65e6f11f68b38c..a274427a699a43 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -238,6 +238,7 @@ } // _BINARY_OP_INPLACE_ADD_UNICODE { + TIER_ONE_ONLY assert(next_instr->op.code == STORE_FAST); PyObject **target_local = &GETLOCAL(next_instr->op.arg); DEOPT_IF(*target_local != left, BINARY_OP); @@ -451,6 +452,7 @@ PyObject *sub; PyObject *dict; PyObject *res; + /* Skip 1 cache entry */ sub = stack_pointer[-1]; dict = stack_pointer[-2]; DEOPT_IF(!PyDict_CheckExact(dict), BINARY_SUBSCR); @@ -475,6 +477,7 @@ static_assert(INLINE_CACHE_ENTRIES_BINARY_SUBSCR == 1, "incorrect cache size"); PyObject *sub; PyObject *container; + /* Skip 1 cache entry */ sub = stack_pointer[-1]; container = stack_pointer[-2]; DEOPT_IF(tstate->interp->eval_frame, BINARY_SUBSCR); @@ -508,6 +511,7 @@ PyObject *sub; PyObject *list; PyObject *res; + /* Skip 1 cache entry */ sub = stack_pointer[-1]; list = stack_pointer[-2]; DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); @@ -535,6 +539,7 @@ PyObject *sub; PyObject *str; PyObject *res; + /* Skip 1 cache entry */ sub = stack_pointer[-1]; str = stack_pointer[-2]; DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); @@ -562,6 +567,7 @@ PyObject *sub; PyObject *tuple; PyObject *res; + /* Skip 1 cache entry */ sub = stack_pointer[-1]; tuple = stack_pointer[-2]; DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); @@ -677,7 +683,7 @@ PyObject *stop; PyObject *start; PyObject *slice; - if (oparg == 3) { step = stack_pointer[-(((oparg == 3) ? 1 : 0))]; } + if (oparg == 3) { step = stack_pointer[-(((oparg == 3) ? 1 : 0))]; } stop = stack_pointer[-1 - (((oparg == 3) ? 1 : 0))]; start = stack_pointer[-2 - (((oparg == 3) ? 
1 : 0))]; slice = PySlice_New(start, stop, step); @@ -839,6 +845,8 @@ PyObject **args; PyObject *null; PyObject *callable; + /* Skip 1 cache entry */ + /* Skip 2 cache entries */ args = &stack_pointer[-oparg]; null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; @@ -1000,6 +1008,8 @@ PyObject *self_or_null; PyObject *callable; PyObject *res; + /* Skip 1 cache entry */ + /* Skip 2 cache entries */ args = &stack_pointer[-oparg]; self_or_null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; @@ -1034,6 +1044,8 @@ PyObject *self_or_null; PyObject *callable; PyObject *res; + /* Skip 1 cache entry */ + /* Skip 2 cache entries */ args = &stack_pointer[-oparg]; self_or_null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; @@ -1079,6 +1091,8 @@ PyObject *self_or_null; PyObject *callable; PyObject *res; + /* Skip 1 cache entry */ + /* Skip 2 cache entries */ args = &stack_pointer[-oparg]; self_or_null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; @@ -1118,6 +1132,8 @@ PyObject *self_or_null; PyObject *callable; PyObject *res; + /* Skip 1 cache entry */ + /* Skip 2 cache entries */ args = &stack_pointer[-oparg]; self_or_null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; @@ -1160,7 +1176,7 @@ PyObject *callargs; PyObject *func; PyObject *result; - if (oparg & 1) { kwargs = stack_pointer[-((oparg & 1))]; } + if (oparg & 1) { kwargs = stack_pointer[-((oparg & 1))]; } callargs = stack_pointer[-1 - ((oparg & 1))]; func = stack_pointer[-3 - ((oparg & 1))]; // DICT_MERGE is called before this opcode if there are kwargs. @@ -1278,6 +1294,8 @@ PyObject *self_or_null; PyObject *callable; PyObject *res; + /* Skip 1 cache entry */ + /* Skip 2 cache entries */ args = &stack_pointer[-oparg]; self_or_null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; @@ -1407,6 +1425,8 @@ PyObject *self_or_null; PyObject *callable; PyObject *res; + /* Skip 1 cache entry */ + /* Skip 2 cache entries */ args = &stack_pointer[-oparg]; self_or_null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; @@ -1443,9 +1463,12 @@ PyObject **args; PyObject *self; PyObject *callable; + /* Skip 1 cache entry */ + /* Skip 2 cache entries */ args = &stack_pointer[-oparg]; self = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; + TIER_ONE_ONLY assert(oparg == 1); PyInterpreterState *interp = tstate->interp; DEOPT_IF(callable != interp->callable_cache.list_append, CALL); @@ -1473,6 +1496,8 @@ PyObject *self_or_null; PyObject *callable; PyObject *res; + /* Skip 1 cache entry */ + /* Skip 2 cache entries */ args = &stack_pointer[-oparg]; self_or_null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; @@ -1515,6 +1540,8 @@ PyObject *self_or_null; PyObject *callable; PyObject *res; + /* Skip 1 cache entry */ + /* Skip 2 cache entries */ args = &stack_pointer[-oparg]; self_or_null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; @@ -1557,6 +1584,8 @@ PyObject *self_or_null; PyObject *callable; PyObject *res; + /* Skip 1 cache entry */ + /* Skip 2 cache entries */ args = &stack_pointer[-oparg]; self_or_null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; @@ -1601,6 +1630,8 @@ PyObject *self_or_null; PyObject *callable; PyObject *res; + /* Skip 1 cache entry */ + /* Skip 2 cache entries */ args = &stack_pointer[-oparg]; self_or_null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; @@ -1726,6 +1757,7 @@ PyObject **args; PyObject *self_or_null; 
PyObject *callable; + /* Skip 1 cache entry */ args = &stack_pointer[-oparg]; self_or_null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; @@ -1772,6 +1804,8 @@ PyObject *null; PyObject *callable; PyObject *res; + /* Skip 1 cache entry */ + /* Skip 2 cache entries */ args = &stack_pointer[-oparg]; null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; @@ -1799,6 +1833,8 @@ PyObject *null; PyObject *callable; PyObject *res; + /* Skip 1 cache entry */ + /* Skip 2 cache entries */ args = &stack_pointer[-oparg]; null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; @@ -1826,6 +1862,8 @@ PyObject *null; PyObject *callable; PyObject *res; + /* Skip 1 cache entry */ + /* Skip 2 cache entries */ args = &stack_pointer[-oparg]; null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; @@ -1980,6 +2018,7 @@ PyObject *right; PyObject *left; PyObject *res; + /* Skip 1 cache entry */ right = stack_pointer[-1]; left = stack_pointer[-2]; DEOPT_IF(!PyFloat_CheckExact(left), COMPARE_OP); @@ -2006,6 +2045,7 @@ PyObject *right; PyObject *left; PyObject *res; + /* Skip 1 cache entry */ right = stack_pointer[-1]; left = stack_pointer[-2]; DEOPT_IF(!PyLong_CheckExact(left), COMPARE_OP); @@ -2036,6 +2076,7 @@ PyObject *right; PyObject *left; PyObject *res; + /* Skip 1 cache entry */ right = stack_pointer[-1]; left = stack_pointer[-2]; DEOPT_IF(!PyUnicode_CheckExact(left), COMPARE_OP); @@ -2467,6 +2508,7 @@ INSTRUCTION_STATS(FOR_ITER_GEN); static_assert(INLINE_CACHE_ENTRIES_FOR_ITER == 1, "incorrect cache size"); PyObject *iter; + /* Skip 1 cache entry */ iter = stack_pointer[-1]; DEOPT_IF(tstate->interp->eval_frame, FOR_ITER); PyGenObject *gen = (PyGenObject *)iter; @@ -2841,6 +2883,7 @@ _Py_CODEUNIT *this_instr = frame->instr_ptr = next_instr; next_instr += 4; INSTRUCTION_STATS(INSTRUMENTED_CALL); + /* Skip 3 cache entries */ int is_meth = PEEK(oparg + 1) != NULL; int total_args = oparg + is_meth; PyObject *function = PEEK(oparg + 2); @@ -2927,6 +2970,7 @@ _Py_CODEUNIT *this_instr = frame->instr_ptr = next_instr; next_instr += 2; INSTRUCTION_STATS(INSTRUMENTED_FOR_ITER); + /* Skip 1 cache entry */ _Py_CODEUNIT *target; PyObject *iter = TOP(); PyObject *next = (*Py_TYPE(iter)->tp_iternext)(iter); @@ -2974,6 +3018,7 @@ _Py_CODEUNIT *this_instr = frame->instr_ptr = next_instr; next_instr += 2; INSTRUCTION_STATS(INSTRUMENTED_JUMP_BACKWARD); + /* Skip 1 cache entry */ CHECK_EVAL_BREAKER(); INSTRUMENTED_JUMP(this_instr, next_instr - oparg, PY_MONITORING_EVENT_JUMP); DISPATCH(); @@ -2991,6 +3036,7 @@ _Py_CODEUNIT *this_instr = frame->instr_ptr = next_instr; next_instr += 2; INSTRUCTION_STATS(INSTRUMENTED_LOAD_SUPER_ATTR); + /* Skip 1 cache entry */ // cancel out the decrement that will happen in LOAD_SUPER_ATTR; we // don't want to specialize instrumented instructions INCREMENT_ADAPTIVE_COUNTER(this_instr[1].cache); @@ -3001,6 +3047,7 @@ _Py_CODEUNIT *this_instr = frame->instr_ptr = next_instr; next_instr += 2; INSTRUCTION_STATS(INSTRUMENTED_POP_JUMP_IF_FALSE); + /* Skip 1 cache entry */ PyObject *cond = POP(); assert(PyBool_Check(cond)); int flag = Py_IsFalse(cond); @@ -3016,6 +3063,7 @@ _Py_CODEUNIT *this_instr = frame->instr_ptr = next_instr; next_instr += 2; INSTRUCTION_STATS(INSTRUMENTED_POP_JUMP_IF_NONE); + /* Skip 1 cache entry */ PyObject *value = POP(); int flag = Py_IsNone(value); int offset; @@ -3037,6 +3085,7 @@ _Py_CODEUNIT *this_instr = frame->instr_ptr = next_instr; next_instr += 2; INSTRUCTION_STATS(INSTRUMENTED_POP_JUMP_IF_NOT_NONE); + 
/* Skip 1 cache entry */ PyObject *value = POP(); int offset; int nflag = Py_IsNone(value); @@ -3058,6 +3107,7 @@ _Py_CODEUNIT *this_instr = frame->instr_ptr = next_instr; next_instr += 2; INSTRUCTION_STATS(INSTRUMENTED_POP_JUMP_IF_TRUE); + /* Skip 1 cache entry */ PyObject *cond = POP(); assert(PyBool_Check(cond)); int flag = Py_IsTrue(cond); @@ -3182,6 +3232,7 @@ INSTRUCTION_STATS(INTERPRETER_EXIT); PyObject *retval; retval = stack_pointer[-1]; + TIER_ONE_ONLY assert(frame == &entry_frame); assert(_PyFrame_IsIncomplete(frame)); /* Restore previous frame and return. */ @@ -3213,6 +3264,7 @@ _Py_CODEUNIT *this_instr = frame->instr_ptr = next_instr; next_instr += 2; INSTRUCTION_STATS(JUMP_BACKWARD); + /* Skip 1 cache entry */ CHECK_EVAL_BREAKER(); assert(oparg <= INSTR_OFFSET()); JUMPBY(-oparg); @@ -3253,6 +3305,7 @@ frame->instr_ptr = next_instr; next_instr += 1; INSTRUCTION_STATS(JUMP_BACKWARD_NO_INTERRUPT); + TIER_ONE_ONLY /* This bytecode is used in the `yield from` or `await` loop. * If there is an interrupt, we want it handled in the innermost * generator or coroutine, so we deliberately do not check it here. @@ -3266,6 +3319,7 @@ frame->instr_ptr = next_instr; next_instr += 1; INSTRUCTION_STATS(JUMP_FORWARD); + TIER_ONE_ONLY JUMPBY(oparg); DISPATCH(); } @@ -3380,7 +3434,7 @@ } } stack_pointer[-1] = attr; - if (oparg & 1) stack_pointer[0] = self_or_null; + if (oparg & 1) stack_pointer[0] = self_or_null; stack_pointer += ((oparg & 1)); DISPATCH(); } @@ -3413,7 +3467,7 @@ Py_DECREF(owner); } stack_pointer[-1] = attr; - if (oparg & 1) stack_pointer[0] = null; + if (oparg & 1) stack_pointer[0] = null; stack_pointer += ((oparg & 1)); DISPATCH(); } @@ -3424,6 +3478,7 @@ INSTRUCTION_STATS(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); PyObject *owner; + /* Skip 1 cache entry */ owner = stack_pointer[-1]; uint32_t type_version = read_u32(&this_instr[2].cache); uint32_t func_version = read_u32(&this_instr[4].cache); @@ -3489,7 +3544,7 @@ } /* Skip 5 cache entries */ stack_pointer[-1] = attr; - if (oparg & 1) stack_pointer[0] = null; + if (oparg & 1) stack_pointer[0] = null; stack_pointer += ((oparg & 1)); DISPATCH(); } @@ -3531,7 +3586,7 @@ self = owner; } stack_pointer[-1] = attr; - if (1) stack_pointer[0] = self; + if (1) stack_pointer[0] = self; stack_pointer += (((1) ? 1 : 0)); DISPATCH(); } @@ -3566,7 +3621,7 @@ self = owner; } stack_pointer[-1] = attr; - if (1) stack_pointer[0] = self; + if (1) stack_pointer[0] = self; stack_pointer += (((1) ? 1 : 0)); DISPATCH(); } @@ -3613,7 +3668,7 @@ self = owner; } stack_pointer[-1] = attr; - if (1) stack_pointer[0] = self; + if (1) stack_pointer[0] = self; stack_pointer += (((1) ? 
1 : 0)); DISPATCH(); } @@ -3652,7 +3707,7 @@ } /* Skip 5 cache entries */ stack_pointer[-1] = attr; - if (oparg & 1) stack_pointer[0] = null; + if (oparg & 1) stack_pointer[0] = null; stack_pointer += ((oparg & 1)); DISPATCH(); } @@ -3738,6 +3793,7 @@ INSTRUCTION_STATS(LOAD_ATTR_PROPERTY); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); PyObject *owner; + /* Skip 1 cache entry */ owner = stack_pointer[-1]; uint32_t type_version = read_u32(&this_instr[2].cache); uint32_t func_version = read_u32(&this_instr[4].cache); @@ -3794,7 +3850,7 @@ } /* Skip 5 cache entries */ stack_pointer[-1] = attr; - if (oparg & 1) stack_pointer[0] = null; + if (oparg & 1) stack_pointer[0] = null; stack_pointer += ((oparg & 1)); DISPATCH(); } @@ -3850,7 +3906,7 @@ } /* Skip 5 cache entries */ stack_pointer[-1] = attr; - if (oparg & 1) stack_pointer[0] = null; + if (oparg & 1) stack_pointer[0] = null; stack_pointer += ((oparg & 1)); DISPATCH(); } @@ -4078,7 +4134,7 @@ null = NULL; } stack_pointer[0] = res; - if (oparg & 1) stack_pointer[1] = null; + if (oparg & 1) stack_pointer[1] = null; stack_pointer += 1 + ((oparg & 1)); DISPATCH(); } @@ -4119,7 +4175,7 @@ null = NULL; } stack_pointer[0] = res; - if (oparg & 1) stack_pointer[1] = null; + if (oparg & 1) stack_pointer[1] = null; stack_pointer += 1 + ((oparg & 1)); DISPATCH(); } @@ -4153,7 +4209,7 @@ null = NULL; } stack_pointer[0] = res; - if (oparg & 1) stack_pointer[1] = null; + if (oparg & 1) stack_pointer[1] = null; stack_pointer += 1 + ((oparg & 1)); DISPATCH(); } @@ -4281,7 +4337,7 @@ null = NULL; } stack_pointer[-3] = attr; - if (oparg & 1) stack_pointer[-2] = null; + if (oparg & 1) stack_pointer[-2] = null; stack_pointer += -2 + ((oparg & 1)); DISPATCH(); } @@ -4295,6 +4351,7 @@ PyObject *class; PyObject *global_super; PyObject *attr; + /* Skip 1 cache entry */ self = stack_pointer[-1]; class = stack_pointer[-2]; global_super = stack_pointer[-3]; @@ -4323,6 +4380,7 @@ PyObject *global_super; PyObject *attr; PyObject *self_or_null; + /* Skip 1 cache entry */ self = stack_pointer[-1]; class = stack_pointer[-2]; global_super = stack_pointer[-3]; @@ -4793,6 +4851,7 @@ frame->instr_ptr = next_instr; next_instr += 1; INSTRUCTION_STATS(RETURN_GENERATOR); + TIER_ONE_ONLY assert(PyFunction_Check(frame->f_funcobj)); PyFunctionObject *func = (PyFunctionObject *)frame->f_funcobj; PyGenObject *gen = (PyGenObject *)_Py_MakeCoro(func); @@ -4921,6 +4980,7 @@ static_assert(INLINE_CACHE_ENTRIES_SEND == 1, "incorrect cache size"); PyObject *v; PyObject *receiver; + /* Skip 1 cache entry */ v = stack_pointer[-1]; receiver = stack_pointer[-2]; DEOPT_IF(tstate->interp->eval_frame, SEND); @@ -5151,6 +5211,7 @@ static_assert(INLINE_CACHE_ENTRIES_STORE_ATTR == 4, "incorrect cache size"); PyObject *owner; PyObject *value; + /* Skip 1 cache entry */ owner = stack_pointer[-1]; value = stack_pointer[-2]; uint32_t type_version = read_u32(&this_instr[2].cache); @@ -5368,6 +5429,7 @@ PyObject *sub; PyObject *dict; PyObject *value; + /* Skip 1 cache entry */ sub = stack_pointer[-1]; dict = stack_pointer[-2]; value = stack_pointer[-3]; @@ -5388,6 +5450,7 @@ PyObject *sub; PyObject *list; PyObject *value; + /* Skip 1 cache entry */ sub = stack_pointer[-1]; list = stack_pointer[-2]; value = stack_pointer[-3]; @@ -5464,6 +5527,7 @@ static_assert(INLINE_CACHE_ENTRIES_TO_BOOL == 3, "incorrect cache size"); PyObject *value; PyObject *res; + /* Skip 1 cache entry */ value = stack_pointer[-1]; uint32_t version = read_u32(&this_instr[2].cache); // This one is a bit 
weird, because we expect *some* failures: @@ -5482,6 +5546,8 @@ INSTRUCTION_STATS(TO_BOOL_BOOL); static_assert(INLINE_CACHE_ENTRIES_TO_BOOL == 3, "incorrect cache size"); PyObject *value; + /* Skip 1 cache entry */ + /* Skip 2 cache entries */ value = stack_pointer[-1]; DEOPT_IF(!PyBool_Check(value), TO_BOOL); STAT_INC(TO_BOOL, hit); @@ -5495,6 +5561,8 @@ static_assert(INLINE_CACHE_ENTRIES_TO_BOOL == 3, "incorrect cache size"); PyObject *value; PyObject *res; + /* Skip 1 cache entry */ + /* Skip 2 cache entries */ value = stack_pointer[-1]; DEOPT_IF(!PyLong_CheckExact(value), TO_BOOL); STAT_INC(TO_BOOL, hit); @@ -5517,6 +5585,8 @@ static_assert(INLINE_CACHE_ENTRIES_TO_BOOL == 3, "incorrect cache size"); PyObject *value; PyObject *res; + /* Skip 1 cache entry */ + /* Skip 2 cache entries */ value = stack_pointer[-1]; DEOPT_IF(!PyList_CheckExact(value), TO_BOOL); STAT_INC(TO_BOOL, hit); @@ -5533,6 +5603,8 @@ static_assert(INLINE_CACHE_ENTRIES_TO_BOOL == 3, "incorrect cache size"); PyObject *value; PyObject *res; + /* Skip 1 cache entry */ + /* Skip 2 cache entries */ value = stack_pointer[-1]; // This one is a bit weird, because we expect *some* failures: DEOPT_IF(!Py_IsNone(value), TO_BOOL); @@ -5549,6 +5621,8 @@ static_assert(INLINE_CACHE_ENTRIES_TO_BOOL == 3, "incorrect cache size"); PyObject *value; PyObject *res; + /* Skip 1 cache entry */ + /* Skip 2 cache entries */ value = stack_pointer[-1]; DEOPT_IF(!PyUnicode_CheckExact(value), TO_BOOL); STAT_INC(TO_BOOL, hit); @@ -5663,6 +5737,7 @@ static_assert(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE == 1, "incorrect cache size"); PyObject *seq; PyObject **values; + /* Skip 1 cache entry */ seq = stack_pointer[-1]; values = &stack_pointer[-1]; DEOPT_IF(!PyList_CheckExact(seq), UNPACK_SEQUENCE); @@ -5684,6 +5759,7 @@ static_assert(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE == 1, "incorrect cache size"); PyObject *seq; PyObject **values; + /* Skip 1 cache entry */ seq = stack_pointer[-1]; values = &stack_pointer[-1]; DEOPT_IF(!PyTuple_CheckExact(seq), UNPACK_SEQUENCE); @@ -5705,6 +5781,7 @@ static_assert(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE == 1, "incorrect cache size"); PyObject *seq; PyObject **values; + /* Skip 1 cache entry */ seq = stack_pointer[-1]; values = &stack_pointer[-1]; DEOPT_IF(!PyTuple_CheckExact(seq), UNPACK_SEQUENCE); @@ -5764,6 +5841,7 @@ INSTRUCTION_STATS(YIELD_VALUE); PyObject *retval; retval = stack_pointer[-1]; + TIER_ONE_ONLY // NOTE: It's important that YIELD_VALUE never raises an exception! // The compiler treats any exception raised here as a failed close() // or throw() call. diff --git a/Python/lock.c b/Python/lock.c index e9279f0b92a5e7..f0ff1176941da8 100644 --- a/Python/lock.c +++ b/Python/lock.c @@ -353,3 +353,109 @@ _PyOnceFlag_CallOnceSlow(_PyOnceFlag *flag, _Py_once_fn_t *fn, void *arg) v = _Py_atomic_load_uint8(&flag->v); } } + +#define _Py_WRITE_LOCKED 1 +#define _PyRWMutex_READER_SHIFT 2 +#define _Py_RWMUTEX_MAX_READERS (UINTPTR_MAX >> _PyRWMutex_READER_SHIFT) + +static uintptr_t +rwmutex_set_parked_and_wait(_PyRWMutex *rwmutex, uintptr_t bits) +{ + // Set _Py_HAS_PARKED and wait until we are woken up. 
+ if ((bits & _Py_HAS_PARKED) == 0) { + uintptr_t newval = bits | _Py_HAS_PARKED; + if (!_Py_atomic_compare_exchange_uintptr(&rwmutex->bits, + &bits, newval)) { + return bits; + } + bits = newval; + } + + _PyParkingLot_Park(&rwmutex->bits, &bits, sizeof(bits), -1, NULL, 1); + return _Py_atomic_load_uintptr_relaxed(&rwmutex->bits); +} + +// The number of readers holding the lock +static uintptr_t +rwmutex_reader_count(uintptr_t bits) +{ + return bits >> _PyRWMutex_READER_SHIFT; +} + +void +_PyRWMutex_RLock(_PyRWMutex *rwmutex) +{ + uintptr_t bits = _Py_atomic_load_uintptr_relaxed(&rwmutex->bits); + for (;;) { + if ((bits & _Py_WRITE_LOCKED)) { + // A writer already holds the lock. + bits = rwmutex_set_parked_and_wait(rwmutex, bits); + continue; + } + else if ((bits & _Py_HAS_PARKED)) { + // Reader(s) hold the lock (or just gave up the lock), but there is + // at least one waiting writer. We can't grab the lock because we + // don't want to starve the writer. Instead, we park ourselves and + // wait for the writer to eventually wake us up. + bits = rwmutex_set_parked_and_wait(rwmutex, bits); + continue; + } + else { + // The lock is unlocked or read-locked. Try to grab it. + assert(rwmutex_reader_count(bits) < _Py_RWMUTEX_MAX_READERS); + uintptr_t newval = bits + (1 << _PyRWMutex_READER_SHIFT); + if (!_Py_atomic_compare_exchange_uintptr(&rwmutex->bits, + &bits, newval)) { + continue; + } + return; + } + } +} + +void +_PyRWMutex_RUnlock(_PyRWMutex *rwmutex) +{ + uintptr_t bits = _Py_atomic_add_uintptr(&rwmutex->bits, -(1 << _PyRWMutex_READER_SHIFT)); + assert(rwmutex_reader_count(bits) > 0 && "lock was not read-locked"); + bits -= (1 << _PyRWMutex_READER_SHIFT); + + if (rwmutex_reader_count(bits) == 0 && (bits & _Py_HAS_PARKED)) { + _PyParkingLot_UnparkAll(&rwmutex->bits); + return; + } +} + +void +_PyRWMutex_Lock(_PyRWMutex *rwmutex) +{ + uintptr_t bits = _Py_atomic_load_uintptr_relaxed(&rwmutex->bits); + for (;;) { + // If there are no active readers and it's not already write-locked, + // then we can grab the lock. + if ((bits & ~_Py_HAS_PARKED) == 0) { + if (!_Py_atomic_compare_exchange_uintptr(&rwmutex->bits, + &bits, + bits | _Py_WRITE_LOCKED)) { + continue; + } + return; + } + + // Otherwise, we have to wait. + bits = rwmutex_set_parked_and_wait(rwmutex, bits); + } +} + +void +_PyRWMutex_Unlock(_PyRWMutex *rwmutex) +{ + uintptr_t old_bits = _Py_atomic_exchange_uintptr(&rwmutex->bits, 0); + + assert((old_bits & _Py_WRITE_LOCKED) && "lock was not write-locked"); + assert(rwmutex_reader_count(old_bits) == 0 && "lock was read-locked"); + + if ((old_bits & _Py_HAS_PARKED) != 0) { + _PyParkingLot_UnparkAll(&rwmutex->bits); + } +} diff --git a/Python/pystate.c b/Python/pystate.c index e18eb0186d0010..632a119ea6d4f8 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -1951,6 +1951,20 @@ _PyThreadState_Bind(PyThreadState *tstate) } } +#if defined(Py_GIL_DISABLED) && !defined(Py_LIMITED_API) +uintptr_t +_Py_GetThreadLocal_Addr(void) +{ +#ifdef HAVE_THREAD_LOCAL + // gh-112535: Use the address of the thread-local PyThreadState variable as + // a unique identifier for the current thread. Each thread has a unique + // _Py_tss_tstate variable with a unique address. 
+ return (uintptr_t)&_Py_tss_tstate; +#else +# error "no supported thread-local variable storage classifier" +#endif +} +#endif /***********************************/ /* routines for advanced debuggers */ diff --git a/Tools/build/generate_sbom.py b/Tools/build/generate_sbom.py index 0089db81af9b9d..c02eb88b46532f 100644 --- a/Tools/build/generate_sbom.py +++ b/Tools/build/generate_sbom.py @@ -50,7 +50,7 @@ class PackageFiles(typing.NamedTuple): include=["Modules/expat/**"] ), "pip": PackageFiles( - include=["Lib/ensurepip/_bundled/pip-23.3.1-py3-none-any.whl"] + include=["Lib/ensurepip/_bundled/pip-23.3.2-py3-none-any.whl"] ), "macholib": PackageFiles( include=["Lib/ctypes/macholib/**"], diff --git a/Tools/cases_generator/analyzer.py b/Tools/cases_generator/analyzer.py index bcc13538e51d9b..e077eb0a8ed203 100644 --- a/Tools/cases_generator/analyzer.py +++ b/Tools/cases_generator/analyzer.py @@ -234,9 +234,9 @@ def analyze_stack(op: parser.InstDef) -> StackEffect: return StackEffect(inputs, outputs) -def analyze_caches(op: parser.InstDef) -> list[CacheEntry]: +def analyze_caches(inputs: list[parser.InputEffect]) -> list[CacheEntry]: caches: list[parser.CacheEffect] = [ - i for i in op.inputs if isinstance(i, parser.CacheEffect) + i for i in inputs if isinstance(i, parser.CacheEffect) ] return [CacheEntry(i.name, int(i.size)) for i in caches] @@ -314,13 +314,13 @@ def compute_properties(op: parser.InstDef) -> Properties: ) -def make_uop(name: str, op: parser.InstDef) -> Uop: +def make_uop(name: str, op: parser.InstDef, inputs: list[parser.InputEffect]) -> Uop: return Uop( name=name, context=op.context, annotations=op.annotations, stack=analyze_stack(op), - caches=analyze_caches(op), + caches=analyze_caches(inputs), body=op.block.tokens, properties=compute_properties(op), ) @@ -333,7 +333,7 @@ def add_op(op: parser.InstDef, uops: dict[str, Uop]) -> None: raise override_error( op.name, op.context, uops[op.name].context, op.tokens[0] ) - uops[op.name] = make_uop(op.name, op) + uops[op.name] = make_uop(op.name, op, op.inputs) def add_instruction( @@ -347,10 +347,27 @@ def desugar_inst( ) -> None: assert inst.kind == "inst" name = inst.name - uop = make_uop("_" + inst.name, inst) + op_inputs: list[parser.InputEffect] = [] + parts: list[Part] = [] + uop_index = -1 + # Move unused cache entries to the Instruction, removing them from the Uop. + for input in inst.inputs: + if isinstance(input, parser.CacheEffect) and input.name == "unused": + parts.append(Skip(input.size)) + else: + op_inputs.append(input) + if uop_index < 0: + uop_index = len(parts) + # Place holder for the uop. 
+ parts.append(Skip(0)) + uop = make_uop("_" + inst.name, inst, op_inputs) uop.implicitly_created = True uops[inst.name] = uop - add_instruction(name, [uop], instructions) + if uop_index < 0: + parts.append(uop) + else: + parts[uop_index] = uop + add_instruction(name, parts, instructions) def add_macro( @@ -444,7 +461,8 @@ def analyze_forest(forest: list[parser.AstNode]) -> Analysis: if target.text in instructions: instructions[target.text].is_target = True # Hack - instructions["BINARY_OP_INPLACE_ADD_UNICODE"].family = families["BINARY_OP"] + if "BINARY_OP_INPLACE_ADD_UNICODE" in instructions: + instructions["BINARY_OP_INPLACE_ADD_UNICODE"].family = families["BINARY_OP"] return Analysis(instructions, uops, families, pseudos) diff --git a/Tools/cases_generator/cwriter.py b/Tools/cases_generator/cwriter.py index 34e39855a9b40a..67b1c9a169024c 100644 --- a/Tools/cases_generator/cwriter.py +++ b/Tools/cases_generator/cwriter.py @@ -38,7 +38,8 @@ def maybe_dedent(self, txt: str) -> None: parens = txt.count("(") - txt.count(")") if parens < 0: self.indents.pop() - elif "}" in txt or is_label(txt): + braces = txt.count("{") - txt.count("}") + if braces < 0 or is_label(txt): self.indents.pop() def maybe_indent(self, txt: str) -> None: @@ -50,11 +51,13 @@ def maybe_indent(self, txt: str) -> None: self.indents.append(offset) if is_label(txt): self.indents.append(self.indents[-1] + 4) - elif "{" in txt: - if 'extern "C"' in txt: - self.indents.append(self.indents[-1]) - else: - self.indents.append(self.indents[-1] + 4) + else: + braces = txt.count("{") - txt.count("}") + if braces > 0: + if 'extern "C"' in txt: + self.indents.append(self.indents[-1]) + else: + self.indents.append(self.indents[-1] + 4) def emit_text(self, txt: str) -> None: self.out.write(txt) diff --git a/Tools/cases_generator/generate_cases.py b/Tools/cases_generator/generate_cases.py index c6ed5911b846bf..50bc14a57fc584 100644 --- a/Tools/cases_generator/generate_cases.py +++ b/Tools/cases_generator/generate_cases.py @@ -209,8 +209,9 @@ def write_function( "", ): with self.out.block("switch(opcode)"): - for instr, effect in data: - self.out.emit(f"case {instr.name}:") + effects = [(instr.name, effect) for instr, effect in data] + for name, effect in sorted(effects): + self.out.emit(f"case {name}:") self.out.emit(f" return {effect};") self.out.emit("default:") self.out.emit(" return -1;") @@ -433,7 +434,8 @@ def write_metadata(self, metadata_filename: str, pymetadata_filename: str) -> No ";", ): # Write metadata for each instruction - for thing in self.everything: + sorted_things = sorted(self.everything, key = lambda t:t.name) + for thing in sorted_things: match thing: case parsing.InstDef(): self.write_metadata_for_inst(self.instrs[thing.name]) @@ -456,7 +458,7 @@ def write_metadata(self, metadata_filename: str, pymetadata_filename: str) -> No ";", ): # Write macro expansion for each non-pseudo instruction - for mac in self.macro_instrs.values(): + for mac in sorted(self.macro_instrs.values(), key=lambda t: t.name): if is_super_instruction(mac): # Special-case the heck out of super-instructions self.write_super_expansions(mac.name) @@ -475,7 +477,7 @@ def write_metadata(self, metadata_filename: str, pymetadata_filename: str) -> No "=", ";", ): - for name in self.opmap: + for name in sorted(self.opmap): self.out.emit(f'[{name}] = "{name}",') with self.metadata_item( @@ -589,7 +591,7 @@ def add(name: str) -> None: add("_EXIT_TRACE") add("_SET_IP") - for instr in self.instrs.values(): + for instr in sorted(self.instrs.values(), 
key=lambda t:t.name): # Skip ops that are also macros -- those are desugared inst()s if instr.name not in self.macros: add(instr.name) diff --git a/Tools/cases_generator/generators_common.py b/Tools/cases_generator/generators_common.py index e0674a7343498d..1b565bff2c56f6 100644 --- a/Tools/cases_generator/generators_common.py +++ b/Tools/cases_generator/generators_common.py @@ -22,8 +22,10 @@ def root_relative_path(filename: str) -> str: - return Path(filename).absolute().relative_to(ROOT).as_posix() - + try: + return Path(filename).absolute().relative_to(ROOT).as_posix() + except ValueError: + return filename def write_header(generator: str, sources: list[str], outfile: TextIO) -> None: outfile.write( diff --git a/Tools/cases_generator/parser.py b/Tools/cases_generator/parser.py index 12173a61199700..fe4e8e476eadee 100644 --- a/Tools/cases_generator/parser.py +++ b/Tools/cases_generator/parser.py @@ -7,6 +7,7 @@ Context, CacheEffect, StackEffect, + InputEffect, OpName, AstNode, ) diff --git a/Tools/cases_generator/stack.py b/Tools/cases_generator/stack.py index c36a56ebf2d381..0b31ce4090f552 100644 --- a/Tools/cases_generator/stack.py +++ b/Tools/cases_generator/stack.py @@ -148,7 +148,7 @@ def flush(self, out: CWriter) -> None: cast = "(PyObject *)" if var.type else "" if var.name != "unused" and not var.is_array(): if var.condition: - out.emit(f" if ({var.condition}) ") + out.emit(f"if ({var.condition}) ") out.emit( f"stack_pointer[{self.base_offset.to_c()}] = {cast}{var.name};\n" ) diff --git a/Tools/cases_generator/tier1_generator.py b/Tools/cases_generator/tier1_generator.py index 11885dca6fe1a2..49cede978d821a 100644 --- a/Tools/cases_generator/tier1_generator.py +++ b/Tools/cases_generator/tier1_generator.py @@ -151,7 +151,8 @@ def generate_tier1( stack = Stack() for part in inst.parts: # Only emit braces if more than one uop - offset = write_uop(part, out, offset, stack, inst, len(inst.parts) > 1) + insert_braces = len([p for p in inst.parts if isinstance(p, Uop)]) > 1 + offset = write_uop(part, out, offset, stack, inst, insert_braces) out.start_line() if not inst.parts[-1].properties.always_exits: stack.flush(out) @@ -181,6 +182,14 @@ def generate_tier1( "input", nargs=argparse.REMAINDER, help="Instruction definition file(s)" ) + +def generate_tier1_from_files( + filenames: list[str], outfilename: str, lines: bool +) -> None: + data = analyze_files(filenames) + with open(outfilename, "w") as outfile: + generate_tier1(filenames, data, outfile, lines) + if __name__ == "__main__": args = arg_parser.parse_args() if len(args.input) == 0: diff --git a/Tools/clinic/clinic.py b/Tools/clinic/clinic.py index 1637ca1428c11b..c230cfa3e31574 100755 --- a/Tools/clinic/clinic.py +++ b/Tools/clinic/clinic.py @@ -5638,6 +5638,10 @@ def state_modulename_name(self, line: str) -> None: function_name = fields.pop() module, cls = self.clinic._module_and_class(fields) + if self.kind in {GETTER, SETTER}: + if not cls: + fail("@getter and @setter must be methods") + self.update_function_kind(full_name) if self.kind is METHOD_INIT and not return_converter: return_converter = init_return_converter() diff --git a/configure b/configure index 668a0efd77db0e..7e50abc29d0c1a 100755 --- a/configure +++ b/configure @@ -17779,6 +17779,12 @@ if test "x$ac_cv_func_posix_spawnp" = xyes then : printf "%s\n" "#define HAVE_POSIX_SPAWNP 1" >>confdefs.h +fi +ac_fn_c_check_func "$LINENO" "posix_spawn_file_actions_addclosefrom_np" "ac_cv_func_posix_spawn_file_actions_addclosefrom_np" +if test 
"x$ac_cv_func_posix_spawn_file_actions_addclosefrom_np" = xyes +then : + printf "%s\n" "#define HAVE_POSIX_SPAWN_FILE_ACTIONS_ADDCLOSEFROM_NP 1" >>confdefs.h + fi ac_fn_c_check_func "$LINENO" "pread" "ac_cv_func_pread" if test "x$ac_cv_func_pread" = xyes diff --git a/configure.ac b/configure.ac index 020553abd71b4f..e064848af9ed1b 100644 --- a/configure.ac +++ b/configure.ac @@ -4757,6 +4757,7 @@ AC_CHECK_FUNCS([ \ lockf lstat lutimes madvise mbrtowc memrchr mkdirat mkfifo mkfifoat \ mknod mknodat mktime mmap mremap nice openat opendir pathconf pause pipe \ pipe2 plock poll posix_fadvise posix_fallocate posix_spawn posix_spawnp \ + posix_spawn_file_actions_addclosefrom_np \ pread preadv preadv2 pthread_condattr_setclock pthread_init pthread_kill \ pwrite pwritev pwritev2 readlink readlinkat readv realpath renameat \ rtpSpawn sched_get_priority_max sched_rr_get_interval sched_setaffinity \ diff --git a/pyconfig.h.in b/pyconfig.h.in index 9c429c03722383..d8a9f68951afbd 100644 --- a/pyconfig.h.in +++ b/pyconfig.h.in @@ -905,6 +905,10 @@ /* Define to 1 if you have the `posix_spawnp' function. */ #undef HAVE_POSIX_SPAWNP +/* Define to 1 if you have the `posix_spawn_file_actions_addclosefrom_np' + function. */ +#undef HAVE_POSIX_SPAWN_FILE_ACTIONS_ADDCLOSEFROM_NP + /* Define to 1 if you have the `pread' function. */ #undef HAVE_PREAD