diff --git a/docs/source/error_code_list2.rst b/docs/source/error_code_list2.rst index 465d1c7a6583c..2b765e412913d 100644 --- a/docs/source/error_code_list2.rst +++ b/docs/source/error_code_list2.rst @@ -5,8 +5,8 @@ Error codes for optional checks This section documents various errors codes that mypy generates only if you enable certain options. See :ref:`error-codes` for general -documentation about error codes. :ref:`error-code-list` documents -error codes that are enabled by default. +documentation about error codes and their configuration. +:ref:`error-code-list` documents error codes that are enabled by default. .. note:: @@ -241,7 +241,7 @@ mypy generates an error if it thinks that an expression is redundant. .. code-block:: python - # Use "mypy --enable-error-code redundant-expr ..." + # mypy: enable-error-code="redundant-expr" def example(x: int) -> None: # Error: Left operand of "and" is always true [redundant-expr] @@ -268,7 +268,7 @@ example: .. code-block:: python - # Use "mypy --enable-error-code possibly-undefined ..." + # mypy: enable-error-code="possibly-undefined" from typing import Iterable @@ -297,7 +297,7 @@ Using an iterable value in a boolean context has a separate error code .. code-block:: python - # Use "mypy --enable-error-code truthy-bool ..." + # mypy: enable-error-code="truthy-bool" class Foo: pass @@ -347,7 +347,7 @@ Example: .. code-block:: python - # Use "mypy --enable-error-code ignore-without-code ..." + # mypy: enable-error-code="ignore-without-code" class Foo: def __init__(self, name: str) -> None: @@ -378,7 +378,7 @@ Example: .. code-block:: python - # Use "mypy --enable-error-code unused-awaitable ..." + # mypy: enable-error-code="unused-awaitable" import asyncio @@ -462,7 +462,7 @@ Example: .. code-block:: python - # Use "mypy --enable-error-code explicit-override ..." + # mypy: enable-error-code="explicit-override" from typing import override @@ -536,7 +536,7 @@ Now users can actually import ``reveal_type`` to make the runtime code safe. .. code-block:: python - # Use "mypy --enable-error-code unimported-reveal" + # mypy: enable-error-code="unimported-reveal" x = 1 reveal_type(x) # Note: Revealed type is "builtins.int" \ @@ -546,7 +546,7 @@ Correct usage: .. code-block:: python - # Use "mypy --enable-error-code unimported-reveal" + # mypy: enable-error-code="unimported-reveal" from typing import reveal_type # or `typing_extensions` x = 1 diff --git a/docs/source/error_codes.rst b/docs/source/error_codes.rst index 35fad161f8a25..485d70cb59bc8 100644 --- a/docs/source/error_codes.rst +++ b/docs/source/error_codes.rst @@ -87,9 +87,13 @@ still keep the other two error codes enabled. The overall logic is following: * Individual config sections *adjust* them per glob/module -* Inline ``# mypy: disable-error-code="..."`` comments can further - *adjust* them for a specific module. - For example: ``# mypy: disable-error-code="truthy-bool, ignore-without-code"`` +* Inline ``# mypy: disable-error-code="..."`` and ``# mypy: enable-error-code="..."`` + comments can further *adjust* them for a specific file. + For example: + +.. code-block:: python + + # mypy: enable-error-code="truthy-bool, ignore-without-code" So one can e.g. 
enable some code globally, disable it for all tests in the corresponding config section, and then re-enable it with an inline diff --git a/misc/typeshed_patches/0001-Remove-use-of-LiteralString-in-builtins-13743.patch b/misc/typeshed_patches/0001-Remove-use-of-LiteralString-in-builtins-13743.patch index 6a0977dfc4891..683b0c322b710 100644 --- a/misc/typeshed_patches/0001-Remove-use-of-LiteralString-in-builtins-13743.patch +++ b/misc/typeshed_patches/0001-Remove-use-of-LiteralString-in-builtins-13743.patch @@ -1,14 +1,14 @@ -From 5c00e362d40aa26e0a22a740f05a52d05edf0f91 Mon Sep 17 00:00:00 2001 +From 3ec9b878d6bbe3fae64a508a62372f10a886406f Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Mon, 26 Sep 2022 12:55:07 -0700 Subject: [PATCH] Remove use of LiteralString in builtins (#13743) --- - mypy/typeshed/stdlib/builtins.pyi | 88 ------------------------------- - 1 file changed, 88 deletions(-) + mypy/typeshed/stdlib/builtins.pyi | 95 ------------------------------- + 1 file changed, 95 deletions(-) diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi -index b4765b26c..99919c64c 100644 +index 53e00ec6a..bad3250ef 100644 --- a/mypy/typeshed/stdlib/builtins.pyi +++ b/mypy/typeshed/stdlib/builtins.pyi @@ -61,7 +61,6 @@ from typing import ( # noqa: Y022 @@ -19,7 +19,7 @@ index b4765b26c..99919c64c 100644 ParamSpec, Self, TypeAlias, -@@ -434,31 +433,16 @@ class str(Sequence[str]): +@@ -435,31 +434,16 @@ class str(Sequence[str]): def __new__(cls, object: object = ...) -> Self: ... @overload def __new__(cls, object: ReadableBuffer, encoding: str = ..., errors: str = ...) -> Self: ... @@ -49,9 +49,9 @@ index b4765b26c..99919c64c 100644 - def format(self: LiteralString, *args: LiteralString, **kwargs: LiteralString) -> LiteralString: ... - @overload def format(self, *args: object, **kwargs: object) -> str: ... - def format_map(self, map: _FormatMapMapping) -> str: ... + def format_map(self, mapping: _FormatMapMapping, /) -> str: ... def index(self, sub: str, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /) -> int: ... -@@ -474,89 +458,32 @@ class str(Sequence[str]): +@@ -475,99 +459,35 @@ class str(Sequence[str]): def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... @@ -75,10 +75,20 @@ index b4765b26c..99919c64c 100644 - def partition(self: LiteralString, sep: LiteralString, /) -> tuple[LiteralString, LiteralString, LiteralString]: ... - @overload def partition(self, sep: str, /) -> tuple[str, str, str]: ... # type: ignore[misc] -- @overload -- def replace(self: LiteralString, old: LiteralString, new: LiteralString, count: SupportsIndex = -1, /) -> LiteralString: ... -- @overload - def replace(self, old: str, new: str, count: SupportsIndex = -1, /) -> str: ... # type: ignore[misc] + if sys.version_info >= (3, 13): +- @overload +- def replace( +- self: LiteralString, old: LiteralString, new: LiteralString, /, count: SupportsIndex = -1 +- ) -> LiteralString: ... +- @overload + def replace(self, old: str, new: str, /, count: SupportsIndex = -1) -> str: ... # type: ignore[misc] + else: +- @overload +- def replace( +- self: LiteralString, old: LiteralString, new: LiteralString, count: SupportsIndex = -1, / +- ) -> LiteralString: ... +- @overload + def replace(self, old: str, new: str, count: SupportsIndex = -1, /) -> str: ... 
# type: ignore[misc] if sys.version_info >= (3, 9): - @overload - def removeprefix(self: LiteralString, prefix: LiteralString, /) -> LiteralString: ... @@ -141,7 +151,7 @@ index b4765b26c..99919c64c 100644 def zfill(self, width: SupportsIndex, /) -> str: ... # type: ignore[misc] @staticmethod @overload -@@ -567,9 +494,6 @@ class str(Sequence[str]): +@@ -578,9 +498,6 @@ class str(Sequence[str]): @staticmethod @overload def maketrans(x: str, y: str, z: str, /) -> dict[int, int | None]: ... @@ -151,7 +161,7 @@ index b4765b26c..99919c64c 100644 def __add__(self, value: str, /) -> str: ... # type: ignore[misc] # Incompatible with Sequence.__contains__ def __contains__(self, key: str, /) -> bool: ... # type: ignore[override] -@@ -578,25 +502,13 @@ class str(Sequence[str]): +@@ -589,25 +506,13 @@ class str(Sequence[str]): def __getitem__(self, key: SupportsIndex | slice, /) -> str: ... def __gt__(self, value: str, /) -> bool: ... def __hash__(self) -> int: ... @@ -178,5 +188,5 @@ index b4765b26c..99919c64c 100644 def __getnewargs__(self) -> tuple[str]: ... -- -2.39.3 (Apple Git-146) +2.45.2 diff --git a/mypy/applytype.py b/mypy/applytype.py index eecd555bf90d5..783748cd8a5eb 100644 --- a/mypy/applytype.py +++ b/mypy/applytype.py @@ -1,17 +1,24 @@ from __future__ import annotations -from typing import Callable, Sequence +from typing import Callable, Iterable, Sequence import mypy.subtypes from mypy.erasetype import erase_typevars from mypy.expandtype import expand_type -from mypy.nodes import Context +from mypy.nodes import Context, TypeInfo +from mypy.type_visitor import TypeTranslator +from mypy.typeops import get_all_type_vars from mypy.types import ( AnyType, CallableType, + Instance, + Parameters, + ParamSpecFlavor, ParamSpecType, PartialType, + ProperType, Type, + TypeAliasType, TypeVarId, TypeVarLikeType, TypeVarTupleType, @@ -19,6 +26,7 @@ UninhabitedType, UnpackType, get_proper_type, + remove_dups, ) @@ -93,8 +101,7 @@ def apply_generic_arguments( bound or constraints, instead of giving an error. """ tvars = callable.variables - min_arg_count = sum(not tv.has_default() for tv in tvars) - assert min_arg_count <= len(orig_types) <= len(tvars) + assert len(orig_types) <= len(tvars) # Check that inferred type variable values are compatible with allowed # values and bounds. Also, promote subtype values to allowed values. # Create a map from type variable id to target type. @@ -148,7 +155,7 @@ def apply_generic_arguments( type_is = None # The callable may retain some type vars if only some were applied. - # TODO: move apply_poly() logic from checkexpr.py here when new inference + # TODO: move apply_poly() logic here when new inference # becomes universally used (i.e. in all passes + in unification). # With this new logic we can actually *add* some new free variables. remaining_tvars: list[TypeVarLikeType] = [] @@ -170,3 +177,126 @@ def apply_generic_arguments( type_guard=type_guard, type_is=type_is, ) + + +def apply_poly(tp: CallableType, poly_tvars: Sequence[TypeVarLikeType]) -> CallableType | None: + """Make free type variables generic in the type if possible. + + This will translate the type `tp` while trying to create valid bindings for + type variables `poly_tvars` while traversing the type. 
This follows the same rules + as we do during semantic analysis phase, examples: + * Callable[Callable[[T], T], T] -> def [T] (def (T) -> T) -> T + * Callable[[], Callable[[T], T]] -> def () -> def [T] (T -> T) + * List[T] -> None (not possible) + """ + try: + return tp.copy_modified( + arg_types=[t.accept(PolyTranslator(poly_tvars)) for t in tp.arg_types], + ret_type=tp.ret_type.accept(PolyTranslator(poly_tvars)), + variables=[], + ) + except PolyTranslationError: + return None + + +class PolyTranslationError(Exception): + pass + + +class PolyTranslator(TypeTranslator): + """Make free type variables generic in the type if possible. + + See docstring for apply_poly() for details. + """ + + def __init__( + self, + poly_tvars: Iterable[TypeVarLikeType], + bound_tvars: frozenset[TypeVarLikeType] = frozenset(), + seen_aliases: frozenset[TypeInfo] = frozenset(), + ) -> None: + self.poly_tvars = set(poly_tvars) + # This is a simplified version of TypeVarScope used during semantic analysis. + self.bound_tvars = bound_tvars + self.seen_aliases = seen_aliases + + def collect_vars(self, t: CallableType | Parameters) -> list[TypeVarLikeType]: + found_vars = [] + for arg in t.arg_types: + for tv in get_all_type_vars(arg): + if isinstance(tv, ParamSpecType): + normalized: TypeVarLikeType = tv.copy_modified( + flavor=ParamSpecFlavor.BARE, prefix=Parameters([], [], []) + ) + else: + normalized = tv + if normalized in self.poly_tvars and normalized not in self.bound_tvars: + found_vars.append(normalized) + return remove_dups(found_vars) + + def visit_callable_type(self, t: CallableType) -> Type: + found_vars = self.collect_vars(t) + self.bound_tvars |= set(found_vars) + result = super().visit_callable_type(t) + self.bound_tvars -= set(found_vars) + + assert isinstance(result, ProperType) and isinstance(result, CallableType) + result.variables = list(result.variables) + found_vars + return result + + def visit_type_var(self, t: TypeVarType) -> Type: + if t in self.poly_tvars and t not in self.bound_tvars: + raise PolyTranslationError() + return super().visit_type_var(t) + + def visit_param_spec(self, t: ParamSpecType) -> Type: + if t in self.poly_tvars and t not in self.bound_tvars: + raise PolyTranslationError() + return super().visit_param_spec(t) + + def visit_type_var_tuple(self, t: TypeVarTupleType) -> Type: + if t in self.poly_tvars and t not in self.bound_tvars: + raise PolyTranslationError() + return super().visit_type_var_tuple(t) + + def visit_type_alias_type(self, t: TypeAliasType) -> Type: + if not t.args: + return t.copy_modified() + if not t.is_recursive: + return get_proper_type(t).accept(self) + # We can't handle polymorphic application for recursive generic aliases + # without risking an infinite recursion, just give up for now. + raise PolyTranslationError() + + def visit_instance(self, t: Instance) -> Type: + if t.type.has_param_spec_type: + # We need this special-casing to preserve the possibility to store a + # generic function in an instance type. Things like + # forall T . Foo[[x: T], T] + # are not really expressible in current type system, but this looks like + # a useful feature, so let's keep it. 
+ param_spec_index = next( + i for (i, tv) in enumerate(t.type.defn.type_vars) if isinstance(tv, ParamSpecType) + ) + p = get_proper_type(t.args[param_spec_index]) + if isinstance(p, Parameters): + found_vars = self.collect_vars(p) + self.bound_tvars |= set(found_vars) + new_args = [a.accept(self) for a in t.args] + self.bound_tvars -= set(found_vars) + + repl = new_args[param_spec_index] + assert isinstance(repl, ProperType) and isinstance(repl, Parameters) + repl.variables = list(repl.variables) + list(found_vars) + return t.copy_modified(args=new_args) + # There is the same problem with callback protocols as with aliases + # (callback protocols are essentially more flexible aliases to callables). + if t.args and t.type.is_protocol and t.type.protocol_members == ["__call__"]: + if t.type in self.seen_aliases: + raise PolyTranslationError() + call = mypy.subtypes.find_member("__call__", t, t, is_operator=True) + assert call is not None + return call.accept( + PolyTranslator(self.poly_tvars, self.bound_tvars, self.seen_aliases | {t.type}) + ) + return super().visit_instance(t) diff --git a/mypy/build.py b/mypy/build.py index 3ceb473f09484..733f0685792ed 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -3467,8 +3467,11 @@ def process_stale_scc(graph: Graph, scc: list[str], manager: BuildManager) -> No for id in stale: graph[id].transitive_error = True for id in stale: - errors = manager.errors.file_messages(graph[id].xpath, formatter=manager.error_formatter) - manager.flush_errors(manager.errors.simplify_path(graph[id].xpath), errors, False) + if graph[id].xpath not in manager.errors.ignored_files: + errors = manager.errors.file_messages( + graph[id].xpath, formatter=manager.error_formatter + ) + manager.flush_errors(manager.errors.simplify_path(graph[id].xpath), errors, False) graph[id].write_cache() graph[id].mark_as_rechecked() diff --git a/mypy/checker.py b/mypy/checker.py index 179ff6e0b4b68..2df74cf7be8dd 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -49,6 +49,7 @@ SUGGESTED_TEST_FIXTURES, MessageBuilder, append_invariance_notes, + append_union_note, format_type, format_type_bare, format_type_distinctly, @@ -125,6 +126,7 @@ TryStmt, TupleExpr, TypeAlias, + TypeAliasStmt, TypeInfo, TypeVarExpr, UnaryExpr, @@ -169,7 +171,6 @@ false_only, fixup_partial_type, function_type, - get_type_vars, is_literal_type_like, is_singleton_type, make_simplified_union, @@ -786,14 +787,35 @@ def check_overlapping_overloads(self, defn: OverloadedFuncDef) -> None: type_vars = current_class.defn.type_vars if current_class else [] with state.strict_optional_set(True): if is_unsafe_overlapping_overload_signatures(sig1, sig2, type_vars): - self.msg.overloaded_signatures_overlap(i + 1, i + j + 2, item.func) + flip_note = ( + j == 0 + and not is_unsafe_overlapping_overload_signatures( + sig2, sig1, type_vars + ) + and not overload_can_never_match(sig2, sig1) + ) + self.msg.overloaded_signatures_overlap( + i + 1, i + j + 2, flip_note, item.func + ) if impl_type is not None: assert defn.impl is not None + # This is what we want from implementation, it should accept all arguments + # of an overload, but the return types should go the opposite way. + if is_callable_compatible( + impl_type, + sig1, + is_compat=is_subtype, + is_proper_subtype=False, + is_compat_return=lambda l, r: is_subtype(r, l), + ): + continue + # If the above check didn't work, we repeat some key steps in + # is_callable_compatible() to give a better error message. 
+ # We perform a unification step that's very similar to what - # 'is_callable_compatible' would have done if we had set - # 'unify_generics' to True -- the only difference is that + # 'is_callable_compatible' does -- the only difference is that # we check and see if the impl_type's return value is a # *supertype* of the overload alternative, not a *subtype*. # @@ -1751,6 +1773,8 @@ def is_unsafe_overlapping_op( # second operand is the right argument -- we switch the order of # the arguments of the reverse method. + # TODO: this manipulation is dangerous if callables are generic. + # Shuffling arguments between callables can create meaningless types. forward_tweaked = forward_item.copy_modified( arg_types=[forward_base_erased, forward_item.arg_types[0]], arg_kinds=[nodes.ARG_POS] * 2, @@ -1777,7 +1801,9 @@ def is_unsafe_overlapping_op( current_class = self.scope.active_class() type_vars = current_class.defn.type_vars if current_class else [] - return is_unsafe_overlapping_overload_signatures(first, second, type_vars) + return is_unsafe_overlapping_overload_signatures( + first, second, type_vars, partial_only=False + ) def check_inplace_operator_method(self, defn: FuncBase) -> None: """Check an inplace operator method such as __iadd__. @@ -1912,8 +1938,15 @@ def check_explicit_override_decorator( found_method_base_classes: list[TypeInfo] | None, context: Context | None = None, ) -> None: + plugin_generated = False + if defn.info and (node := defn.info.get(defn.name)) and node.plugin_generated: + # Do not report issues for plugin generated nodes, + # they can't realistically use `@override` for their methods. + plugin_generated = True + if ( - found_method_base_classes + not plugin_generated + and found_method_base_classes and not defn.is_explicit_override and defn.name not in ("__init__", "__new__") and not is_private(defn.name) @@ -2171,7 +2204,9 @@ def bind_and_map_method( def get_op_other_domain(self, tp: FunctionLike) -> Type | None: if isinstance(tp, CallableType): if tp.arg_kinds and tp.arg_kinds[0] == ARG_POS: - return tp.arg_types[0] + # For generic methods, domain comparison is tricky, as a first + # approximation erase all remaining type variables. + return erase_typevars(tp.arg_types[0], {v.id for v in tp.variables}) return None elif isinstance(tp, Overloaded): raw_items = [self.get_op_other_domain(it) for it in tp.items] @@ -2241,6 +2276,7 @@ def check_override( if fail: emitted_msg = False + offset_arguments = isinstance(override, CallableType) and override.unpack_kwargs # Normalize signatures, so we get better diagnostics. 
if isinstance(override, (CallableType, Overloaded)): override = override.with_unpacked_kwargs() @@ -2271,13 +2307,24 @@ def check_override( def erase_override(t: Type) -> Type: return erase_typevars(t, ids_to_erase=override_ids) - for i in range(len(override.arg_types)): - if not is_subtype( - original.arg_types[i], erase_override(override.arg_types[i]) - ): - arg_type_in_super = original.arg_types[i] - - if isinstance(node, FuncDef): + for i, (sub_kind, super_kind) in enumerate( + zip(override.arg_kinds, original.arg_kinds) + ): + if sub_kind.is_positional() and super_kind.is_positional(): + override_arg_type = override.arg_types[i] + original_arg_type = original.arg_types[i] + elif sub_kind.is_named() and super_kind.is_named() and not offset_arguments: + arg_name = override.arg_names[i] + if arg_name in original.arg_names: + override_arg_type = override.arg_types[i] + original_i = original.arg_names.index(arg_name) + original_arg_type = original.arg_types[original_i] + else: + continue + else: + continue + if not is_subtype(original_arg_type, erase_override(override_arg_type)): + if isinstance(node, FuncDef) and not node.is_property: context: Context = node.arguments[i + len(override.bound_args)] else: context = node @@ -2286,7 +2333,7 @@ def erase_override(t: Type) -> Type: name, type_name, name_in_super, - arg_type_in_super, + original_arg_type, supertype, context, secondary_context=node, @@ -3837,6 +3884,8 @@ def check_multi_assignment_from_tuple( self.expr_checker.accept(rvalue, lvalue_type) ) + if isinstance(reinferred_rvalue_type, TypeVarLikeType): + reinferred_rvalue_type = get_proper_type(reinferred_rvalue_type.upper_bound) if isinstance(reinferred_rvalue_type, UnionType): # If this is an Optional type in non-strict Optional code, unwrap it. relevant_items = reinferred_rvalue_type.relevant_items() @@ -5261,6 +5310,9 @@ def remove_capture_conflicts(self, type_map: TypeMap, inferred_types: dict[Var, if node not in inferred_types or not is_subtype(typ, inferred_types[node]): del type_map[expr] + def visit_type_alias_stmt(self, o: TypeAliasStmt) -> None: + self.expr_checker.accept(o.value) + def make_fake_typeinfo( self, curr_module_fullname: str, @@ -5717,7 +5769,9 @@ def combine_maps(list_maps: list[TypeMap]) -> TypeMap: else_map = {} return if_map, else_map - def find_isinstance_check(self, node: Expression) -> tuple[TypeMap, TypeMap]: + def find_isinstance_check( + self, node: Expression, *, in_boolean_context: bool = True + ) -> tuple[TypeMap, TypeMap]: """Find any isinstance checks (within a chain of ands). Includes implicit and explicit checks for None and calls to callable. Also includes TypeGuard and TypeIs functions. @@ -5728,15 +5782,24 @@ def find_isinstance_check(self, node: Expression) -> tuple[TypeMap, TypeMap]: If either of the values in the tuple is None, then that particular branch can never occur. + If `in_boolean_context=True` is passed, it means that we handle + a walrus expression. We treat rhs values + in expressions like `(a := A())` specially: + for example, some errors are suppressed. + May return {}, {}. Can return None, None in situations involving NoReturn. 
""" - if_map, else_map = self.find_isinstance_check_helper(node) + if_map, else_map = self.find_isinstance_check_helper( + node, in_boolean_context=in_boolean_context + ) new_if_map = self.propagate_up_typemap_info(if_map) new_else_map = self.propagate_up_typemap_info(else_map) return new_if_map, new_else_map - def find_isinstance_check_helper(self, node: Expression) -> tuple[TypeMap, TypeMap]: + def find_isinstance_check_helper( + self, node: Expression, *, in_boolean_context: bool = True + ) -> tuple[TypeMap, TypeMap]: if is_true_literal(node): return {}, None if is_false_literal(node): @@ -6005,7 +6068,9 @@ def has_no_custom_eq_checks(t: Type) -> bool: if else_assignment_map is not None: else_map.update(else_assignment_map) - if_condition_map, else_condition_map = self.find_isinstance_check(node.value) + if_condition_map, else_condition_map = self.find_isinstance_check( + node.value, in_boolean_context=False + ) if if_condition_map is not None: if_map.update(if_condition_map) @@ -6067,7 +6132,10 @@ def has_no_custom_eq_checks(t: Type) -> bool: # Restrict the type of the variable to True-ish/False-ish in the if and else branches # respectively original_vartype = self.lookup_type(node) - self._check_for_truthy_type(original_vartype, node) + if in_boolean_context: + # We don't check `:=` values in expressions like `(a := A())`, + # because they produce two error messages. + self._check_for_truthy_type(original_vartype, node) vartype = try_expanding_sum_type_to_union(original_vartype, "builtins.bool") if_type = true_only(vartype) @@ -6770,6 +6838,8 @@ def check_subtype( ) if isinstance(subtype, Instance) and isinstance(supertype, Instance): notes = append_invariance_notes(notes, subtype, supertype) + if isinstance(subtype, UnionType) and isinstance(supertype, UnionType): + notes = append_union_note(notes, subtype, supertype, self.options) if extra_info: msg = msg.with_additional_msg(" (" + ", ".join(extra_info) + ")") @@ -7272,7 +7342,11 @@ def is_writable_attribute(self, node: Node) -> bool: def get_isinstance_type(self, expr: Expression) -> list[TypeRange] | None: if isinstance(expr, OpExpr) and expr.op == "|": left = self.get_isinstance_type(expr.left) + if left is None and is_literal_none(expr.left): + left = [TypeRange(NoneType(), is_upper_bound=False)] right = self.get_isinstance_type(expr.right) + if right is None and is_literal_none(expr.right): + right = [TypeRange(NoneType(), is_upper_bound=False)] if left is None or right is None: return None return left + right @@ -7295,6 +7369,8 @@ def get_isinstance_type(self, expr: Expression) -> list[TypeRange] | None: elif isinstance(typ, Instance) and typ.type.fullname == "builtins.type": object_type = Instance(typ.type.mro[-1], []) types.append(TypeRange(object_type, is_upper_bound=True)) + elif isinstance(typ, Instance) and typ.type.fullname == "types.UnionType" and typ.args: + types.append(TypeRange(UnionType(typ.args), is_upper_bound=False)) elif isinstance(typ, AnyType): types.append(TypeRange(typ, is_upper_bound=False)) else: # we didn't see an actual type, but rather a variable with unknown value @@ -7793,68 +7869,112 @@ def are_argument_counts_overlapping(t: CallableType, s: CallableType) -> bool: return min_args <= max_args +def expand_callable_variants(c: CallableType) -> list[CallableType]: + """Expand a generic callable using all combinations of type variables' values/bounds.""" + for tv in c.variables: + # We need to expand self-type before other variables, because this is the only + # type variable that can have other type 
variables in the upper bound. + if tv.id.is_self(): + c = expand_type(c, {tv.id: tv.upper_bound}).copy_modified( + variables=[v for v in c.variables if not v.id.is_self()] + ) + break + + if not c.is_generic(): + # Fast path. + return [c] + + tvar_values = [] + for tvar in c.variables: + if isinstance(tvar, TypeVarType) and tvar.values: + tvar_values.append(tvar.values) + else: + tvar_values.append([tvar.upper_bound]) + + variants = [] + for combination in itertools.product(*tvar_values): + tvar_map = {tv.id: subst for (tv, subst) in zip(c.variables, combination)} + variants.append(expand_type(c, tvar_map).copy_modified(variables=[])) + return variants + + def is_unsafe_overlapping_overload_signatures( - signature: CallableType, other: CallableType, class_type_vars: list[TypeVarLikeType] + signature: CallableType, + other: CallableType, + class_type_vars: list[TypeVarLikeType], + partial_only: bool = True, ) -> bool: """Check if two overloaded signatures are unsafely overlapping or partially overlapping. - We consider two functions 's' and 't' to be unsafely overlapping if both - of the following are true: + We consider two functions 's' and 't' to be unsafely overlapping if three + conditions hold: + + 1. s's parameters are partially overlapping with t's. i.e. there are calls that are + valid for both signatures. + 2. for these common calls, some of t's parameters types are wider that s's. + 3. s's return type is NOT a subset of t's. - 1. s's parameters are all more precise or partially overlapping with t's - 2. s's return type is NOT a subtype of t's. + Note that we use subset rather than subtype relationship in these checks because: + * Overload selection happens at runtime, not statically. + * This results in more lenient behavior. + This can cause false negatives (e.g. if overloaded function returns an externally + visible attribute with invariant type), but such situations are rare. In general, + overloads in Python are generally unsafe, so we intentionally try to avoid giving + non-actionable errors (see more details in comments below). Assumes that 'signature' appears earlier in the list of overload alternatives then 'other' and that their argument counts are overlapping. """ # Try detaching callables from the containing class so that all TypeVars - # are treated as being free. - # - # This lets us identify cases where the two signatures use completely - # incompatible types -- e.g. see the testOverloadingInferUnionReturnWithMixedTypevars - # test case. + # are treated as being free, i.e. the signature is as seen from inside the class, + # where "self" is not yet bound to anything. signature = detach_callable(signature, class_type_vars) other = detach_callable(other, class_type_vars) - # Note: We repeat this check twice in both directions due to a slight - # asymmetry in 'is_callable_compatible'. When checking for partial overlaps, - # we attempt to unify 'signature' and 'other' both against each other. - # - # If 'signature' cannot be unified with 'other', we end early. However, - # if 'other' cannot be modified with 'signature', the function continues - # using the older version of 'other'. - # - # This discrepancy is unfortunately difficult to get rid of, so we repeat the - # checks twice in both directions for now. - # - # Note that we ignore possible overlap between type variables and None. This - # is technically unsafe, but unsafety is tiny and this prevents some common - # use cases like: - # @overload - # def foo(x: None) -> None: .. 
- # @overload - # def foo(x: T) -> Foo[T]: ... - return is_callable_compatible( - signature, - other, - is_compat=is_overlapping_types_no_promote_no_uninhabited_no_none, - is_proper_subtype=False, - is_compat_return=lambda l, r: not is_subtype_no_promote(l, r), - ignore_return=False, - check_args_covariantly=True, - allow_partial_overlap=True, - no_unify_none=True, - ) or is_callable_compatible( - other, - signature, - is_compat=is_overlapping_types_no_promote_no_uninhabited_no_none, - is_proper_subtype=False, - is_compat_return=lambda l, r: not is_subtype_no_promote(r, l), - ignore_return=False, - check_args_covariantly=False, - allow_partial_overlap=True, - no_unify_none=True, - ) + # Note: We repeat this check twice in both directions compensate for slight + # asymmetries in 'is_callable_compatible'. + + for sig_variant in expand_callable_variants(signature): + for other_variant in expand_callable_variants(other): + # Using only expanded callables may cause false negatives, we can add + # more variants (e.g. using inference between callables) in the future. + if is_subset_no_promote(sig_variant.ret_type, other_variant.ret_type): + continue + if not ( + is_callable_compatible( + sig_variant, + other_variant, + is_compat=is_overlapping_types_for_overload, + check_args_covariantly=False, + is_proper_subtype=False, + is_compat_return=lambda l, r: not is_subset_no_promote(l, r), + allow_partial_overlap=True, + ) + or is_callable_compatible( + other_variant, + sig_variant, + is_compat=is_overlapping_types_for_overload, + check_args_covariantly=True, + is_proper_subtype=False, + is_compat_return=lambda l, r: not is_subset_no_promote(r, l), + allow_partial_overlap=True, + ) + ): + continue + # Using the same `allow_partial_overlap` flag as before, can cause false + # negatives in case where star argument is used in a catch-all fallback overload. + # But again, practicality beats purity here. + if not partial_only or not is_callable_compatible( + other_variant, + sig_variant, + is_compat=is_subset_no_promote, + check_args_covariantly=True, + is_proper_subtype=False, + ignore_return=True, + allow_partial_overlap=True, + ): + return True + return False def detach_callable(typ: CallableType, class_type_vars: list[TypeVarLikeType]) -> CallableType: @@ -7863,21 +7983,11 @@ def detach_callable(typ: CallableType, class_type_vars: list[TypeVarLikeType]) - A callable normally keeps track of the type variables it uses within its 'variables' field. However, if the callable is from a method and that method is using a class type variable, the callable will not keep track of that type variable since it belongs to the class. - - This function will traverse the callable and find all used type vars and add them to the - variables field if it isn't already present. - - The caller can then unify on all type variables whether the callable is originally from - the class or not.""" + """ if not class_type_vars: # Fast path, nothing to update. 
return typ - seen_type_vars = set() - for t in typ.arg_types + [typ.ret_type]: - seen_type_vars |= set(get_type_vars(t)) - return typ.copy_modified( - variables=list(typ.variables) + [tv for tv in class_type_vars if tv in seen_type_vars] - ) + return typ.copy_modified(variables=list(typ.variables) + class_type_vars) def overload_can_never_match(signature: CallableType, other: CallableType) -> bool: @@ -8354,21 +8464,24 @@ def get_property_type(t: ProperType) -> ProperType: return t -def is_subtype_no_promote(left: Type, right: Type) -> bool: - return is_subtype(left, right, ignore_promotions=True) +def is_subset_no_promote(left: Type, right: Type) -> bool: + return is_subtype(left, right, ignore_promotions=True, always_covariant=True) -def is_overlapping_types_no_promote_no_uninhabited_no_none(left: Type, right: Type) -> bool: - # For the purpose of unsafe overload checks we consider list[Never] and list[int] - # non-overlapping. This is consistent with how we treat list[int] and list[str] as - # non-overlapping, despite [] belongs to both. Also this will prevent false positives - # for failed type inference during unification. +def is_overlapping_types_for_overload(left: Type, right: Type) -> bool: + # Note that among other effects 'overlap_for_overloads' flag will effectively + # ignore possible overlap between type variables and None. This is technically + # unsafe, but unsafety is tiny and this prevents some common use cases like: + # @overload + # def foo(x: None) -> None: .. + # @overload + # def foo(x: T) -> Foo[T]: ... return is_overlapping_types( left, right, ignore_promotions=True, - ignore_uninhabited=True, prohibit_none_typevar_overlap=True, + overlap_for_overloads=True, ) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 33509300d430a..bbd699916258a 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -36,6 +36,7 @@ ARG_STAR, ARG_STAR2, IMPLICITLY_ABSTRACT, + LAMBDA_NAME, LITERAL_TYPE, REVEAL_LOCALS, REVEAL_TYPE, @@ -115,7 +116,6 @@ non_method_protocol_members, ) from mypy.traverser import has_await_expression -from mypy.type_visitor import TypeTranslator from mypy.typeanal import ( check_for_explicit_any, fix_instance, @@ -167,7 +167,7 @@ TypedDictType, TypeOfAny, TypeType, - TypeVarLikeType, + TypeVarId, TypeVarTupleType, TypeVarType, UnboundType, @@ -181,7 +181,6 @@ get_proper_types, has_recursive_types, is_named_instance, - remove_dups, split_with_prefix_and_suffix, ) from mypy.types_utils import ( @@ -413,7 +412,9 @@ def analyze_ref_expr(self, e: RefExpr, lvalue: bool = False) -> Type: result = self.alias_type_in_runtime_context( node, ctx=e, alias_definition=e.is_alias_rvalue or lvalue ) - elif isinstance(node, (TypeVarExpr, ParamSpecExpr, TypeVarTupleExpr)): + elif isinstance(node, TypeVarExpr): + return self.named_type("typing.TypeVar") + elif isinstance(node, (ParamSpecExpr, TypeVarTupleExpr)): result = self.object_type() else: if isinstance(node, PlaceholderNode): @@ -428,6 +429,9 @@ def analyze_var_ref(self, var: Var, context: Context) -> Type: if var.type: var_type = get_proper_type(var.type) if isinstance(var_type, Instance): + if var.fullname == "typing.Any": + # The typeshed type is 'object'; give a more useful type in runtime context + return self.named_type("typing._SpecialForm") if self.is_literal_context() and var_type.last_known_value is not None: return var_type.last_known_value if var.name in {"True", "False"}: @@ -529,6 +533,10 @@ def visit_call_expr_inner(self, e: CallExpr, allow_none_return: bool = False) -> and node and 
isinstance(node.node, TypeAlias) and not node.node.no_args + and not ( + isinstance(union_target := get_proper_type(node.node.target), UnionType) + and union_target.uses_pep604_syntax + ) ): self.msg.type_arguments_not_allowed(e) if isinstance(typ, RefExpr) and isinstance(typ.node, TypeInfo): @@ -592,6 +600,7 @@ def visit_call_expr_inner(self, e: CallExpr, allow_none_return: bool = False) -> and self.chk.in_checked_function() and isinstance(callee_type, CallableType) and callee_type.implicit + and callee_type.name != LAMBDA_NAME ): if fullname is None and member is not None: assert object_type is not None @@ -1221,6 +1230,8 @@ def apply_function_plugin( formal_arg_exprs[formal].append(args[actual]) if arg_names: formal_arg_names[formal].append(arg_names[actual]) + else: + formal_arg_names[formal].append(None) formal_arg_kinds[formal].append(arg_kinds[actual]) if object_type is None: @@ -2115,7 +2126,7 @@ def infer_function_type_arguments( ) # Try applying inferred polymorphic type if possible, e.g. Callable[[T], T] can # be interpreted as def [T] (T) -> T, but dict[T, T] cannot be expressed. - applied = apply_poly(poly_callee_type, free_vars) + applied = applytype.apply_poly(poly_callee_type, free_vars) if applied is not None and all( a is not None and not isinstance(get_proper_type(a), UninhabitedType) for a in poly_inferred_args @@ -3296,7 +3307,9 @@ def analyze_ordinary_member_access(self, e: MemberExpr, is_lvalue: bool) -> Type if isinstance(base, RefExpr) and isinstance(base.node, MypyFile): module_symbol_table = base.node.names if isinstance(base, RefExpr) and isinstance(base.node, Var): - is_self = base.node.is_self + # This is needed to special case self-types, so we don't need to track + # these flags separately in checkmember.py. + is_self = base.node.is_self or base.node.is_cls else: is_self = False @@ -4364,16 +4377,25 @@ def visit_index_with_type( return self.nonliteral_tuple_index_helper(left_type, index) elif isinstance(left_type, TypedDictType): return self.visit_typeddict_index_expr(left_type, e.index) - elif ( - isinstance(left_type, FunctionLike) - and left_type.is_type_obj() - and left_type.type_object().is_enum - ): - return self.visit_enum_index_expr(left_type.type_object(), e.index, e) - elif isinstance(left_type, TypeVarType) and not self.has_member( + elif isinstance(left_type, FunctionLike) and left_type.is_type_obj(): + if left_type.type_object().is_enum: + return self.visit_enum_index_expr(left_type.type_object(), e.index, e) + elif left_type.type_object().type_vars: + return self.named_type("types.GenericAlias") + elif ( + left_type.type_object().fullname == "builtins.type" + and self.chk.options.python_version >= (3, 9) + ): + # builtins.type is special: it's not generic in stubs, but it supports indexing + return self.named_type("typing._SpecialForm") + + if isinstance(left_type, TypeVarType) and not self.has_member( left_type.upper_bound, "__getitem__" ): return self.visit_index_with_type(left_type.upper_bound, e, original_type) + elif isinstance(left_type, Instance) and left_type.type.fullname == "typing._SpecialForm": + # Allow special forms to be indexed and used to create union types + return self.named_type("typing._SpecialForm") else: result, method_type = self.check_method_call_by_name( "__getitem__", left_type, [e.index], [ARG_POS], e, original_type=original_type @@ -4698,6 +4720,8 @@ def visit_type_application(self, tapp: TypeApplication) -> Type: is due to slight differences in how type arguments are applied and checked. 
""" if isinstance(tapp.expr, RefExpr) and isinstance(tapp.expr.node, TypeAlias): + if tapp.expr.node.python_3_12_type_alias: + return self.named_type("typing.TypeAliasType") # Subscription of a (generic) alias in runtime context, expand the alias. item = instantiate_type_alias( tapp.expr.node, @@ -4760,6 +4784,8 @@ class LongName(Generic[T]): ... x = A() y = cast(A, ...) """ + if alias.python_3_12_type_alias: + return self.named_type("typing.TypeAliasType") if isinstance(alias.target, Instance) and alias.target.invalid: # type: ignore[misc] # An invalid alias, error already has been reported return AnyType(TypeOfAny.from_error) @@ -4799,6 +4825,12 @@ class LongName(Generic[T]): ... return TypeType(item, line=item.line, column=item.column) elif isinstance(item, AnyType): return AnyType(TypeOfAny.from_another_any, source_any=item) + elif ( + isinstance(item, UnionType) + and item.uses_pep604_syntax + and self.chk.options.python_version >= (3, 10) + ): + return self.chk.named_generic_type("types.UnionType", item.items) else: if alias_definition: return AnyType(TypeOfAny.special_form) @@ -4818,7 +4850,11 @@ class C(Generic[T, Unpack[Ts]]): ... We simply group the arguments that need to go into Ts variable into a TupleType, similar to how it is done in other places using split_with_prefix_and_suffix(). """ - vars = t.variables + if t.is_type_obj(): + # Type arguments must map to class type variables, ignoring constructor vars. + vars = t.type_object().defn.type_vars + else: + vars = list(t.variables) args = flatten_nested_tuples(args) # TODO: this logic is duplicated with semanal_typeargs. @@ -4836,6 +4872,7 @@ class C(Generic[T, Unpack[Ts]]): ... if not vars or not any(isinstance(v, TypeVarTupleType) for v in vars): return list(args) + # TODO: in future we may want to support type application to variadic functions. assert t.is_type_obj() info = t.type_object() # We reuse the logic from semanal phase to reduce code duplication. @@ -4869,10 +4906,23 @@ def apply_type_arguments_to_callable( tp = get_proper_type(tp) if isinstance(tp, CallableType): - min_arg_count = sum(not v.has_default() for v in tp.variables) - has_type_var_tuple = any(isinstance(v, TypeVarTupleType) for v in tp.variables) + if tp.is_type_obj(): + # If we have a class object in runtime context, then the available type + # variables are those of the class, we don't include additional variables + # of the constructor. So that with + # class C(Generic[T]): + # def __init__(self, f: Callable[[S], T], x: S) -> None + # C[int] is valid + # C[int, str] is invalid (although C as a callable has 2 type variables) + # Note: various logic below and in applytype.py relies on the fact that + # class type variables appear *before* constructor variables. + type_vars = tp.type_object().defn.type_vars + else: + type_vars = list(tp.variables) + min_arg_count = sum(not v.has_default() for v in type_vars) + has_type_var_tuple = any(isinstance(v, TypeVarTupleType) for v in type_vars) if ( - len(args) < min_arg_count or len(args) > len(tp.variables) + len(args) < min_arg_count or len(args) > len(type_vars) ) and not has_type_var_tuple: if tp.is_type_obj() and tp.type_object().fullname == "builtins.tuple": # e.g. 
expression tuple[X, Y] @@ -4891,19 +4941,24 @@ def apply_type_arguments_to_callable( bound_args=tp.bound_args, ) self.msg.incompatible_type_application( - min_arg_count, len(tp.variables), len(args), ctx + min_arg_count, len(type_vars), len(args), ctx ) return AnyType(TypeOfAny.from_error) return self.apply_generic_arguments(tp, self.split_for_callable(tp, args, ctx), ctx) if isinstance(tp, Overloaded): for it in tp.items: - min_arg_count = sum(not v.has_default() for v in it.variables) - has_type_var_tuple = any(isinstance(v, TypeVarTupleType) for v in it.variables) + if tp.is_type_obj(): + # Same as above. + type_vars = tp.type_object().defn.type_vars + else: + type_vars = list(it.variables) + min_arg_count = sum(not v.has_default() for v in type_vars) + has_type_var_tuple = any(isinstance(v, TypeVarTupleType) for v in type_vars) if ( - len(args) < min_arg_count or len(args) > len(it.variables) + len(args) < min_arg_count or len(args) > len(type_vars) ) and not has_type_var_tuple: self.msg.incompatible_type_application( - min_arg_count, len(it.variables), len(args), ctx + min_arg_count, len(type_vars), len(args), ctx ) return AnyType(TypeOfAny.from_error) return Overloaded( @@ -4968,7 +5023,7 @@ def check_lst_expr(self, e: ListExpr | SetExpr | TupleExpr, fullname: str, tag: tv = TypeVarType( "T", "T", - id=-1, + id=TypeVarId(-1, namespace=""), values=[], upper_bound=self.object_type(), default=AnyType(TypeOfAny.from_omitted_generics), @@ -5199,7 +5254,7 @@ def visit_dict_expr(self, e: DictExpr) -> Type: kt = TypeVarType( "KT", "KT", - id=-1, + id=TypeVarId(-1, namespace=""), values=[], upper_bound=self.object_type(), default=AnyType(TypeOfAny.from_omitted_generics), @@ -5207,7 +5262,7 @@ def visit_dict_expr(self, e: DictExpr) -> Type: vt = TypeVarType( "VT", "VT", - id=-2, + id=TypeVarId(-2, namespace=""), values=[], upper_bound=self.object_type(), default=AnyType(TypeOfAny.from_omitted_generics), @@ -5599,7 +5654,7 @@ def check_generator_or_comprehension( tv = TypeVarType( "T", "T", - id=-1, + id=TypeVarId(-1, namespace=""), values=[], upper_bound=self.object_type(), default=AnyType(TypeOfAny.from_omitted_generics), @@ -5626,7 +5681,7 @@ def visit_dictionary_comprehension(self, e: DictionaryComprehension) -> Type: ktdef = TypeVarType( "KT", "KT", - id=-1, + id=TypeVarId(-1, namespace=""), values=[], upper_bound=self.object_type(), default=AnyType(TypeOfAny.from_omitted_generics), @@ -5634,7 +5689,7 @@ def visit_dictionary_comprehension(self, e: DictionaryComprehension) -> Type: vtdef = TypeVarType( "VT", "VT", - id=-2, + id=TypeVarId(-2, namespace=""), values=[], upper_bound=self.object_type(), default=AnyType(TypeOfAny.from_omitted_generics), @@ -6254,129 +6309,6 @@ def replace_callable_return_type(c: CallableType, new_ret_type: Type) -> Callabl return c.copy_modified(ret_type=new_ret_type) -def apply_poly(tp: CallableType, poly_tvars: Sequence[TypeVarLikeType]) -> CallableType | None: - """Make free type variables generic in the type if possible. - - This will translate the type `tp` while trying to create valid bindings for - type variables `poly_tvars` while traversing the type. 
This follows the same rules - as we do during semantic analysis phase, examples: - * Callable[Callable[[T], T], T] -> def [T] (def (T) -> T) -> T - * Callable[[], Callable[[T], T]] -> def () -> def [T] (T -> T) - * List[T] -> None (not possible) - """ - try: - return tp.copy_modified( - arg_types=[t.accept(PolyTranslator(poly_tvars)) for t in tp.arg_types], - ret_type=tp.ret_type.accept(PolyTranslator(poly_tvars)), - variables=[], - ) - except PolyTranslationError: - return None - - -class PolyTranslationError(Exception): - pass - - -class PolyTranslator(TypeTranslator): - """Make free type variables generic in the type if possible. - - See docstring for apply_poly() for details. - """ - - def __init__( - self, - poly_tvars: Iterable[TypeVarLikeType], - bound_tvars: frozenset[TypeVarLikeType] = frozenset(), - seen_aliases: frozenset[TypeInfo] = frozenset(), - ) -> None: - self.poly_tvars = set(poly_tvars) - # This is a simplified version of TypeVarScope used during semantic analysis. - self.bound_tvars = bound_tvars - self.seen_aliases = seen_aliases - - def collect_vars(self, t: CallableType | Parameters) -> list[TypeVarLikeType]: - found_vars = [] - for arg in t.arg_types: - for tv in get_all_type_vars(arg): - if isinstance(tv, ParamSpecType): - normalized: TypeVarLikeType = tv.copy_modified( - flavor=ParamSpecFlavor.BARE, prefix=Parameters([], [], []) - ) - else: - normalized = tv - if normalized in self.poly_tvars and normalized not in self.bound_tvars: - found_vars.append(normalized) - return remove_dups(found_vars) - - def visit_callable_type(self, t: CallableType) -> Type: - found_vars = self.collect_vars(t) - self.bound_tvars |= set(found_vars) - result = super().visit_callable_type(t) - self.bound_tvars -= set(found_vars) - - assert isinstance(result, ProperType) and isinstance(result, CallableType) - result.variables = list(result.variables) + found_vars - return result - - def visit_type_var(self, t: TypeVarType) -> Type: - if t in self.poly_tvars and t not in self.bound_tvars: - raise PolyTranslationError() - return super().visit_type_var(t) - - def visit_param_spec(self, t: ParamSpecType) -> Type: - if t in self.poly_tvars and t not in self.bound_tvars: - raise PolyTranslationError() - return super().visit_param_spec(t) - - def visit_type_var_tuple(self, t: TypeVarTupleType) -> Type: - if t in self.poly_tvars and t not in self.bound_tvars: - raise PolyTranslationError() - return super().visit_type_var_tuple(t) - - def visit_type_alias_type(self, t: TypeAliasType) -> Type: - if not t.args: - return t.copy_modified() - if not t.is_recursive: - return get_proper_type(t).accept(self) - # We can't handle polymorphic application for recursive generic aliases - # without risking an infinite recursion, just give up for now. - raise PolyTranslationError() - - def visit_instance(self, t: Instance) -> Type: - if t.type.has_param_spec_type: - # We need this special-casing to preserve the possibility to store a - # generic function in an instance type. Things like - # forall T . Foo[[x: T], T] - # are not really expressible in current type system, but this looks like - # a useful feature, so let's keep it. 
- param_spec_index = next( - i for (i, tv) in enumerate(t.type.defn.type_vars) if isinstance(tv, ParamSpecType) - ) - p = get_proper_type(t.args[param_spec_index]) - if isinstance(p, Parameters): - found_vars = self.collect_vars(p) - self.bound_tvars |= set(found_vars) - new_args = [a.accept(self) for a in t.args] - self.bound_tvars -= set(found_vars) - - repl = new_args[param_spec_index] - assert isinstance(repl, ProperType) and isinstance(repl, Parameters) - repl.variables = list(repl.variables) + list(found_vars) - return t.copy_modified(args=new_args) - # There is the same problem with callback protocols as with aliases - # (callback protocols are essentially more flexible aliases to callables). - if t.args and t.type.is_protocol and t.type.protocol_members == ["__call__"]: - if t.type in self.seen_aliases: - raise PolyTranslationError() - call = find_member("__call__", t, t, is_operator=True) - assert call is not None - return call.accept( - PolyTranslator(self.poly_tvars, self.bound_tvars, self.seen_aliases | {t.type}) - ) - return super().visit_instance(t) - - class ArgInferSecondPassQuery(types.BoolTypeQuery): """Query whether an argument type should be inferred in the second pass. diff --git a/mypy/checkmember.py b/mypy/checkmember.py index fa847de2e4a0c..0f117f5475ed1 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -638,7 +638,7 @@ def analyze_descriptor_access(descriptor_type: Type, mx: MemberContext) -> Type: Return: The return type of the appropriate ``__get__`` overload for the descriptor. """ - instance_type = get_proper_type(mx.original_type) + instance_type = get_proper_type(mx.self_type) orig_descriptor_type = descriptor_type descriptor_type = get_proper_type(descriptor_type) @@ -647,16 +647,6 @@ def analyze_descriptor_access(descriptor_type: Type, mx: MemberContext) -> Type: return make_simplified_union( [analyze_descriptor_access(typ, mx) for typ in descriptor_type.items] ) - elif isinstance(instance_type, UnionType): - # map over the instance types - return make_simplified_union( - [ - analyze_descriptor_access( - descriptor_type, mx.copy_modified(original_type=original_type) - ) - for original_type in instance_type.relevant_items() - ] - ) elif not isinstance(descriptor_type, Instance): return orig_descriptor_type @@ -777,23 +767,10 @@ def analyze_var( if mx.is_lvalue and var.is_classvar: mx.msg.cant_assign_to_classvar(name, mx.context) t = freshen_all_functions_type_vars(typ) - if not (mx.is_self or mx.is_super) or supported_self_type( - get_proper_type(mx.original_type) - ): - t = expand_self_type(var, t, mx.original_type) - elif ( - mx.is_self - and original_itype.type != var.info - # If an attribute with Self-type was defined in a supertype, we need to - # rebind the Self type variable to Self type variable of current class... - and original_itype.type.self_type is not None - # ...unless `self` has an explicit non-trivial annotation. 
- and original_itype == mx.chk.scope.active_self_type() - ): - t = expand_self_type(var, t, original_itype.type.self_type) - t = get_proper_type(expand_type_by_instance(t, itype)) + t = expand_self_type_if_needed(t, mx, var, original_itype) + t = expand_type_by_instance(t, itype) freeze_all_type_vars(t) - result: Type = t + result = t typ = get_proper_type(typ) call_type: ProperType | None = None @@ -857,6 +834,50 @@ def analyze_var( return result +def expand_self_type_if_needed( + t: Type, mx: MemberContext, var: Var, itype: Instance, is_class: bool = False +) -> Type: + """Expand special Self type in a backwards compatible manner. + + This should ensure that mixing old-style and new-style self-types work + seamlessly. Also, re-bind new style self-types in subclasses if needed. + """ + original = get_proper_type(mx.self_type) + if not (mx.is_self or mx.is_super): + repl = mx.self_type + if is_class: + if isinstance(original, TypeType): + repl = original.item + elif isinstance(original, CallableType): + # Problematic access errors should have been already reported. + repl = erase_typevars(original.ret_type) + else: + repl = itype + return expand_self_type(var, t, repl) + elif supported_self_type( + # Support compatibility with plain old style T -> T and Type[T] -> T only. + get_proper_type(mx.self_type), + allow_instances=False, + allow_callable=False, + ): + repl = mx.self_type + if is_class and isinstance(original, TypeType): + repl = original.item + return expand_self_type(var, t, repl) + elif ( + mx.is_self + and itype.type != var.info + # If an attribute with Self-type was defined in a supertype, we need to + # rebind the Self type variable to Self type variable of current class... + and itype.type.self_type is not None + # ...unless `self` has an explicit non-trivial annotation. + and itype == mx.chk.scope.active_self_type() + ): + return expand_self_type(var, t, itype.type.self_type) + else: + return t + + def freeze_all_type_vars(member_type: Type) -> None: member_type.accept(FreezeTypeVarsVisitor()) @@ -1059,12 +1080,11 @@ def analyze_class_attribute_access( else: message = message_registry.GENERIC_INSTANCE_VAR_CLASS_ACCESS mx.msg.fail(message, mx.context) - + t = expand_self_type_if_needed(t, mx, node.node, itype, is_class=True) # Erase non-mapped variables, but keep mapped ones, even if there is an error. # In the above example this means that we infer following types: # C.x -> Any # C[int].x -> int - t = get_proper_type(expand_self_type(node.node, t, itype)) t = erase_typevars(expand_type_by_instance(t, isuper), {tv.id for tv in def_vars}) is_classmethod = (is_decorated and cast(Decorator, node.node).func.is_class) or ( @@ -1143,6 +1163,17 @@ def analyze_enum_class_attribute_access( if name.startswith("__") and name.replace("_", "") != "": return None + node = itype.type.get(name) + if node and node.type: + proper = get_proper_type(node.type) + # Support `A = nonmember(1)` function call and decorator. 
+ if ( + isinstance(proper, Instance) + and proper.type.fullname == "enum.nonmember" + and proper.args + ): + return proper.args[0] + enum_literal = LiteralType(name, fallback=itype) return itype.copy_modified(last_known_value=enum_literal) diff --git a/mypy/constraints.py b/mypy/constraints.py index cdfa39ac45f30..49a2aea8fa051 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -223,9 +223,6 @@ def infer_constraints_for_callable( if actual_arg_type is None: continue - actual_type = mapper.expand_actual_type( - actual_arg_type, arg_kinds[actual], callee.arg_names[i], callee.arg_kinds[i] - ) if param_spec and callee.arg_kinds[i] in (ARG_STAR, ARG_STAR2): # If actual arguments are mapped to ParamSpec type, we can't infer individual # constraints, instead store them and infer single constraint at the end. @@ -243,6 +240,12 @@ def infer_constraints_for_callable( ) param_spec_arg_names.append(arg_names[actual] if arg_names else None) else: + actual_type = mapper.expand_actual_type( + actual_arg_type, + arg_kinds[actual], + callee.arg_names[i], + callee.arg_kinds[i], + ) c = infer_constraints(callee.arg_types[i], actual_type, SUPERTYPE_OF) constraints.extend(c) if ( @@ -688,14 +691,19 @@ def visit_unpack_type(self, template: UnpackType) -> list[Constraint]: def visit_parameters(self, template: Parameters) -> list[Constraint]: # Constraining Any against C[P] turns into infer_against_any([P], Any) - # ... which seems like the only case this can happen. Better to fail loudly otherwise. if isinstance(self.actual, AnyType): return self.infer_against_any(template.arg_types, self.actual) if type_state.infer_polymorphic and isinstance(self.actual, Parameters): # For polymorphic inference we need to be able to infer secondary constraints # in situations like [x: T] <: P <: [x: int]. return infer_callable_arguments_constraints(template, self.actual, self.direction) - raise RuntimeError("Parameters cannot be constrained to") + if type_state.infer_polymorphic and isinstance(self.actual, ParamSpecType): + # Similar for [x: T] <: Q <: Concatenate[int, P]. + return infer_callable_arguments_constraints( + template, self.actual.prefix, self.direction + ) + # There also may be unpatched types after a user error, simply ignore them. + return [] # Non-leaf types @@ -1047,7 +1055,7 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: # like U -> U, should be Callable[..., Any], but if U is a self-type, we can # allow it to leak, to be later bound to self. A bunch of existing code # depends on this old behaviour. - and not any(tv.id.raw_id == 0 for tv in cactual.variables) + and not any(tv.id.is_self() for tv in cactual.variables) ): # If the actual callable is generic, infer constraints in the opposite # direction, and indicate to the solver there are extra type variables @@ -1063,7 +1071,11 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: # (with literal '...'). if not template.is_ellipsis_args: unpack_present = find_unpack_in_list(template.arg_types) - if unpack_present is not None: + # When both ParamSpec and TypeVarTuple are present, things become messy + # quickly. For now, we only allow ParamSpec to "capture" TypeVarTuple, + # but not vice versa. + # TODO: infer more from prefixes when possible. + if unpack_present is not None and not cactual.param_spec(): # We need to re-normalize args to the form they appear in tuples, # for callables we always pack the suffix inside another tuple. 
unpack = template.arg_types[unpack_present] diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py index 7de796a70c8dc..6e8763264ddd3 100644 --- a/mypy/errorcodes.py +++ b/mypy/errorcodes.py @@ -271,7 +271,7 @@ def __hash__(self) -> int: del error_codes[FILE.code] # This is a catch-all for remaining uncategorized errors. -MISC: Final = ErrorCode("misc", "Miscellaneous other checks", "General") +MISC: Final[ErrorCode] = ErrorCode("misc", "Miscellaneous other checks", "General") OVERLOAD_OVERLAP: Final[ErrorCode] = ErrorCode( "overload-overlap", diff --git a/mypy/errors.py b/mypy/errors.py index 7a937da39c20d..d6dcd4e49e130 100644 --- a/mypy/errors.py +++ b/mypy/errors.py @@ -803,7 +803,7 @@ def blocker_module(self) -> str | None: def is_errors_for_file(self, file: str) -> bool: """Are there any errors for the given file?""" - return file in self.error_info_map + return file in self.error_info_map and file not in self.ignored_files def prefer_simple_messages(self) -> bool: """Should we generate simple/fast error messages? diff --git a/mypy/expandtype.py b/mypy/expandtype.py index f7fa0258f588f..5c4d6af9458e3 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -221,7 +221,7 @@ def visit_instance(self, t: Instance) -> Type: def visit_type_var(self, t: TypeVarType) -> Type: # Normally upper bounds can't contain other type variables, the only exception is # special type variable Self`0 <: C[T, S], where C is the class where Self is used. - if t.id.raw_id == 0: + if t.id.is_self(): t = t.copy_modified(upper_bound=t.upper_bound.accept(self)) repl = self.variables.get(t.id, t) if isinstance(repl, ProperType) and isinstance(repl, Instance): @@ -270,6 +270,13 @@ def visit_type_var_tuple(self, t: TypeVarTupleType) -> Type: repl = self.variables.get(t.id, t) if isinstance(repl, TypeVarTupleType): return repl + elif isinstance(repl, ProperType) and isinstance(repl, (AnyType, UninhabitedType)): + # Some failed inference scenarios will try to set all type variables to Never. + # Instead of being picky and require all the callers to wrap them, + # do this here instead. + # Note: most cases when this happens are handled in expand unpack below, but + # in rare cases (e.g. ParamSpec containing Unpack star args) it may be skipped. + return t.tuple_fallback.copy_modified(args=[repl]) raise NotImplementedError def visit_unpack_type(self, t: UnpackType) -> Type: @@ -316,7 +323,7 @@ def interpolate_args_for_unpack(self, t: CallableType, var_arg: UnpackType) -> l new_unpack: Type if isinstance(var_arg_type, Instance): # we have something like Unpack[Tuple[Any, ...]] - new_unpack = var_arg + new_unpack = UnpackType(var_arg.type.accept(self)) elif isinstance(var_arg_type, TupleType): # We have something like Unpack[Tuple[Unpack[Ts], X1, X2]] expanded_tuple = var_arg_type.accept(self) @@ -348,7 +355,7 @@ def visit_callable_type(self, t: CallableType) -> CallableType: # the replacement is ignored. 
if isinstance(repl, Parameters): # We need to expand both the types in the prefix and the ParamSpec itself - return t.copy_modified( + expanded = t.copy_modified( arg_types=self.expand_types(t.arg_types[:-2]) + repl.arg_types, arg_kinds=t.arg_kinds[:-2] + repl.arg_kinds, arg_names=t.arg_names[:-2] + repl.arg_names, @@ -358,6 +365,11 @@ def visit_callable_type(self, t: CallableType) -> CallableType: imprecise_arg_kinds=(t.imprecise_arg_kinds or repl.imprecise_arg_kinds), variables=[*repl.variables, *t.variables], ) + var_arg = expanded.var_arg() + if var_arg is not None and isinstance(var_arg.typ, UnpackType): + # Sometimes we get new unpacks after expanding ParamSpec. + expanded.normalize_trivial_unpack() + return expanded elif isinstance(repl, ParamSpecType): # We're substituting one ParamSpec for another; this can mean that the prefix # changes, e.g. substitute Concatenate[int, P] in place of Q. diff --git a/mypy/exprtotype.py b/mypy/exprtotype.py index 2218a950788cc..d9bdf2e2b20b8 100644 --- a/mypy/exprtotype.py +++ b/mypy/exprtotype.py @@ -122,7 +122,8 @@ def expr_to_unanalyzed_type( [ expr_to_unanalyzed_type(expr.left, options, allow_new_syntax), expr_to_unanalyzed_type(expr.right, options, allow_new_syntax), - ] + ], + uses_pep604_syntax=True, ) elif isinstance(expr, CallExpr) and isinstance(_parent, ListExpr): c = expr.callee diff --git a/mypy/fastparse.py b/mypy/fastparse.py index 49f0a938b7501..342cf36d69e8b 100644 --- a/mypy/fastparse.py +++ b/mypy/fastparse.py @@ -1185,8 +1185,16 @@ def translate_type_params(self, type_params: list[Any]) -> list[TypeParam]: explicit_type_params.append(TypeParam(p.name, TYPE_VAR_TUPLE_KIND, None, [])) else: if isinstance(p.bound, ast3.Tuple): - conv = TypeConverter(self.errors, line=p.lineno) - values = [conv.visit(t) for t in p.bound.elts] + if len(p.bound.elts) < 2: + self.fail( + message_registry.TYPE_VAR_TOO_FEW_CONSTRAINED_TYPES, + p.lineno, + p.col_offset, + blocker=False, + ) + else: + conv = TypeConverter(self.errors, line=p.lineno) + values = [conv.visit(t) for t in p.bound.elts] elif p.bound is not None: bound = TypeConverter(self.errors, line=p.lineno).visit(p.bound) explicit_type_params.append(TypeParam(p.name, TYPE_VAR_KIND, bound, values)) @@ -1783,7 +1791,13 @@ def visit_TypeAlias(self, n: ast_TypeAlias) -> TypeAliasStmt | AssignmentStmt: if NEW_GENERIC_SYNTAX in self.options.enable_incomplete_feature: type_params = self.translate_type_params(n.type_params) value = self.visit(n.value) - node = TypeAliasStmt(self.visit_Name(n.name), type_params, value) + # Since the value is evaluated lazily, wrap the value inside a lambda. + # This helps mypyc. 
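A hedged sketch of what the fastparse changes here and just below mean for the new PEP 695 syntax: the ``type`` statement's value is wrapped in a lambda so it is analyzed lazily, and a constrained type variable with fewer than two constraints is rejected up front. Python 3.12+ syntax; the ``NewGenericSyntax`` flag name is an assumption, and the error text is taken from ``message_registry`` later in this patch.

.. code-block:: python

    # Assumes mypy is run with the incomplete-feature flag for PEP 695 syntax,
    # e.g. --enable-incomplete-feature=NewGenericSyntax (flag name is an assumption).

    type Pair[T] = tuple[T, T]         # the value is analyzed lazily (wrapped in a lambda)

    def ok[S: (int, str)](x: S) -> S:  # two constraints: accepted
        return x

    def bad[S: (int,)](x: S) -> S:     # error: Type variable must have at least two constrained types
        return x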
+ ret = ReturnStmt(value) + self.set_line(ret, n.value) + value_func = LambdaExpr(body=Block([ret])) + self.set_line(value_func, n.value) + node = TypeAliasStmt(self.visit_Name(n.name), type_params, value_func) return self.set_line(node, n) else: self.fail( diff --git a/mypy/join.py b/mypy/join.py index c711697ec46db..5284be7dd2a14 100644 --- a/mypy/join.py +++ b/mypy/join.py @@ -5,6 +5,7 @@ from typing import Sequence, overload import mypy.typeops +from mypy.expandtype import expand_type from mypy.maptype import map_instance_to_supertype from mypy.nodes import CONTRAVARIANT, COVARIANT, INVARIANT, VARIANCE_NOT_READY from mypy.state import state @@ -36,6 +37,7 @@ TypedDictType, TypeOfAny, TypeType, + TypeVarId, TypeVarLikeType, TypeVarTupleType, TypeVarType, @@ -718,7 +720,35 @@ def is_similar_callables(t: CallableType, s: CallableType) -> bool: ) +def update_callable_ids(c: CallableType, ids: list[TypeVarId]) -> CallableType: + tv_map = {} + tvs = [] + for tv, new_id in zip(c.variables, ids): + new_tv = tv.copy_modified(id=new_id) + tvs.append(new_tv) + tv_map[tv.id] = new_tv + return expand_type(c, tv_map).copy_modified(variables=tvs) + + +def match_generic_callables(t: CallableType, s: CallableType) -> tuple[CallableType, CallableType]: + # The case where we combine/join/meet similar callables, situation where both are generic + # requires special care. A more principled solution may involve unify_generic_callable(), + # but it would have two problems: + # * This adds risk of infinite recursion: e.g. join -> unification -> solver -> join + # * Using unification is an incorrect thing for meets, as it "widens" the types + # Finally, this effectively falls back to an old behaviour before namespaces were added to + # type variables, and it worked relatively well. + max_len = max(len(t.variables), len(s.variables)) + min_len = min(len(t.variables), len(s.variables)) + if min_len == 0: + return t, s + new_ids = [TypeVarId.new(meta_level=0) for _ in range(max_len)] + # Note: this relies on variables being in order they appear in function definition. 
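To illustrate the ``match_generic_callables()`` helper introduced here: joining two similar generic callables (for example via a conditional expression) now re-aligns their type variables instead of mixing variables from different namespaces. A rough sketch; the revealed type is approximate.

.. code-block:: python

    from typing import TypeVar, reveal_type

    T = TypeVar("T")
    S = TypeVar("S")

    def first(x: T) -> T: ...
    def second(x: S) -> S: ...

    # The join of two similar generic callables stays generic:
    func = first if bool() else second
    reveal_type(func)  # roughly "def [T] (x: T) -> T" (exact rendering may differ)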
+ return update_callable_ids(t, new_ids), update_callable_ids(s, new_ids) + + def join_similar_callables(t: CallableType, s: CallableType) -> CallableType: + t, s = match_generic_callables(t, s) arg_types: list[Type] = [] for i in range(len(t.arg_types)): arg_types.append(safe_meet(t.arg_types[i], s.arg_types[i])) @@ -771,6 +801,7 @@ def safe_meet(t: Type, s: Type) -> Type: def combine_similar_callables(t: CallableType, s: CallableType) -> CallableType: + t, s = match_generic_callables(t, s) arg_types: list[Type] = [] for i in range(len(t.arg_types)): arg_types.append(safe_join(t.arg_types[i], s.arg_types[i])) diff --git a/mypy/meet.py b/mypy/meet.py index df8b960cdf3f8..91abf43c0877b 100644 --- a/mypy/meet.py +++ b/mypy/meet.py @@ -7,6 +7,8 @@ from mypy.maptype import map_instance_to_supertype from mypy.state import state from mypy.subtypes import ( + are_parameters_compatible, + find_member, is_callable_compatible, is_equivalent, is_proper_subtype, @@ -256,12 +258,16 @@ def is_literal_in_union(x: ProperType, y: ProperType) -> bool: ) +def is_object(t: ProperType) -> bool: + return isinstance(t, Instance) and t.type.fullname == "builtins.object" + + def is_overlapping_types( left: Type, right: Type, ignore_promotions: bool = False, prohibit_none_typevar_overlap: bool = False, - ignore_uninhabited: bool = False, + overlap_for_overloads: bool = False, seen_types: set[tuple[Type, Type]] | None = None, ) -> bool: """Can a value of type 'left' also be of type 'right' or vice-versa? @@ -269,6 +275,9 @@ def is_overlapping_types( If 'ignore_promotions' is True, we ignore promotions while checking for overlaps. If 'prohibit_none_typevar_overlap' is True, we disallow None from overlapping with TypeVars (in both strict-optional and non-strict-optional mode). + If 'overlap_for_overloads' is True, we check for overlaps more strictly (to avoid false + positives), for example: None only overlaps with explicitly optional types, Any + doesn't overlap with anything except object, we don't ignore positional argument names. """ if isinstance(left, TypeGuardedType) or isinstance( # type: ignore[misc] right, TypeGuardedType @@ -295,7 +304,7 @@ def _is_overlapping_types(left: Type, right: Type) -> bool: right, ignore_promotions=ignore_promotions, prohibit_none_typevar_overlap=prohibit_none_typevar_overlap, - ignore_uninhabited=ignore_uninhabited, + overlap_for_overloads=overlap_for_overloads, seen_types=seen_types.copy(), ) @@ -324,7 +333,7 @@ def _is_overlapping_types(left: Type, right: Type) -> bool: # 'Any' may or may not be overlapping with the other type if isinstance(left, AnyType) or isinstance(right, AnyType): - return True + return not overlap_for_overloads or is_object(left) or is_object(right) # We check for complete overlaps next as a general-purpose failsafe. 
# If this check fails, we start checking to see if there exists a @@ -344,11 +353,25 @@ def _is_overlapping_types(left: Type, right: Type) -> bool: ): return True - if is_proper_subtype( - left, right, ignore_promotions=ignore_promotions, ignore_uninhabited=ignore_uninhabited - ) or is_proper_subtype( - right, left, ignore_promotions=ignore_promotions, ignore_uninhabited=ignore_uninhabited - ): + def is_none_object_overlap(t1: Type, t2: Type) -> bool: + t1, t2 = get_proper_types((t1, t2)) + return ( + isinstance(t1, NoneType) + and isinstance(t2, Instance) + and t2.type.fullname == "builtins.object" + ) + + if overlap_for_overloads: + if is_none_object_overlap(left, right) or is_none_object_overlap(right, left): + return False + + def _is_subtype(left: Type, right: Type) -> bool: + if overlap_for_overloads: + return is_proper_subtype(left, right, ignore_promotions=ignore_promotions) + else: + return is_subtype(left, right, ignore_promotions=ignore_promotions) + + if _is_subtype(left, right) or _is_subtype(right, left): return True # See the docstring for 'get_possible_variants' for more info on what the @@ -357,21 +380,6 @@ def _is_overlapping_types(left: Type, right: Type) -> bool: left_possible = get_possible_variants(left) right_possible = get_possible_variants(right) - # First handle special cases relating to PEP 612: - # - comparing a `Parameters` to a `Parameters` - # - comparing a `Parameters` to a `ParamSpecType` - # - comparing a `ParamSpecType` to a `ParamSpecType` - # - # These should all always be considered overlapping equality checks. - # These need to be done before we move on to other TypeVarLike comparisons. - if isinstance(left, (Parameters, ParamSpecType)) and isinstance( - right, (Parameters, ParamSpecType) - ): - return True - # A `Parameters` does not overlap with anything else, however - if isinstance(left, Parameters) or isinstance(right, Parameters): - return False - # Now move on to checking multi-variant types like Unions. We also perform # the same logic if either type happens to be a TypeVar/ParamSpec/TypeVarTuple. # @@ -421,7 +429,7 @@ def is_none_typevarlike_overlap(t1: Type, t2: Type) -> bool: # into their 'Instance' fallbacks. if isinstance(left, TypedDictType) and isinstance(right, TypedDictType): - return are_typed_dicts_overlapping(left, right, ignore_promotions=ignore_promotions) + return are_typed_dicts_overlapping(left, right, _is_overlapping_types) elif typed_dict_mapping_pair(left, right): # Overlaps between TypedDicts and Mappings require dedicated logic. 
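A small sketch of the behaviour described by the ``overlap_for_overloads`` docstring above: when checking overloads for unsafe overlaps, ``None`` now only overlaps with explicitly optional types, so the common pattern of a ``None`` special case before an ``object`` overload should no longer be flagged (outcome is approximate).

.. code-block:: python

    from typing import overload

    @overload
    def parse(x: None) -> None: ...
    @overload
    def parse(x: object) -> str: ...
    def parse(x: object) -> str | None:
        # Previously signatures 1 and 2 could be reported as overlapping with
        # incompatible return types; with the stricter rules this is accepted.
        return None if x is None else str(x)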
return typed_dict_mapping_overlap(left, right, overlapping=_is_overlapping_types) @@ -431,7 +439,7 @@ def is_none_typevarlike_overlap(t1: Type, t2: Type) -> bool: right = right.fallback if is_tuple(left) and is_tuple(right): - return are_tuples_overlapping(left, right, ignore_promotions=ignore_promotions) + return are_tuples_overlapping(left, right, _is_overlapping_types) elif isinstance(left, TupleType): left = tuple_fallback(left) elif isinstance(right, TupleType): @@ -468,18 +476,44 @@ def _type_object_overlap(left: Type, right: Type) -> bool: if isinstance(left, TypeType) or isinstance(right, TypeType): return _type_object_overlap(left, right) or _type_object_overlap(right, left) + if isinstance(left, Parameters) and isinstance(right, Parameters): + return are_parameters_compatible( + left, + right, + is_compat=_is_overlapping_types, + is_proper_subtype=False, + ignore_pos_arg_names=not overlap_for_overloads, + allow_partial_overlap=True, + ) + # A `Parameters` does not overlap with anything else, however + if isinstance(left, Parameters) or isinstance(right, Parameters): + return False + if isinstance(left, CallableType) and isinstance(right, CallableType): return is_callable_compatible( left, right, is_compat=_is_overlapping_types, is_proper_subtype=False, - ignore_pos_arg_names=True, + ignore_pos_arg_names=not overlap_for_overloads, allow_partial_overlap=True, ) - elif isinstance(left, CallableType): + + call = None + other = None + if isinstance(left, CallableType) and isinstance(right, Instance): + call = find_member("__call__", right, right, is_operator=True) + other = left + if isinstance(right, CallableType) and isinstance(left, Instance): + call = find_member("__call__", left, left, is_operator=True) + other = right + if isinstance(get_proper_type(call), FunctionLike): + assert call is not None and other is not None + return _is_overlapping_types(call, other) + + if isinstance(left, CallableType): left = left.fallback - elif isinstance(right, CallableType): + if isinstance(right, CallableType): right = right.fallback if isinstance(left, LiteralType) and isinstance(right, LiteralType): @@ -500,11 +534,7 @@ def _type_object_overlap(left: Type, right: Type) -> bool: if isinstance(left, Instance) and isinstance(right, Instance): # First we need to handle promotions and structural compatibility for instances # that came as fallbacks, so simply call is_subtype() to avoid code duplication. 
- if is_subtype( - left, right, ignore_promotions=ignore_promotions, ignore_uninhabited=ignore_uninhabited - ) or is_subtype( - right, left, ignore_promotions=ignore_promotions, ignore_uninhabited=ignore_uninhabited - ): + if _is_subtype(left, right) or _is_subtype(right, left): return True if right.type.fullname == "builtins.int" and left.type.fullname in MYPYC_NATIVE_INT_NAMES: @@ -564,32 +594,21 @@ def is_overlapping_erased_types( def are_typed_dicts_overlapping( - left: TypedDictType, - right: TypedDictType, - *, - ignore_promotions: bool = False, - prohibit_none_typevar_overlap: bool = False, + left: TypedDictType, right: TypedDictType, is_overlapping: Callable[[Type, Type], bool] ) -> bool: """Returns 'true' if left and right are overlapping TypeDictTypes.""" # All required keys in left are present and overlapping with something in right for key in left.required_keys: if key not in right.items: return False - if not is_overlapping_types( - left.items[key], - right.items[key], - ignore_promotions=ignore_promotions, - prohibit_none_typevar_overlap=prohibit_none_typevar_overlap, - ): + if not is_overlapping(left.items[key], right.items[key]): return False # Repeat check in the other direction for key in right.required_keys: if key not in left.items: return False - if not is_overlapping_types( - left.items[key], right.items[key], ignore_promotions=ignore_promotions - ): + if not is_overlapping(left.items[key], right.items[key]): return False # The presence of any additional optional keys does not affect whether the two @@ -599,11 +618,7 @@ def are_typed_dicts_overlapping( def are_tuples_overlapping( - left: Type, - right: Type, - *, - ignore_promotions: bool = False, - prohibit_none_typevar_overlap: bool = False, + left: Type, right: Type, is_overlapping: Callable[[Type, Type], bool] ) -> bool: """Returns true if left and right are overlapping tuples.""" left, right = get_proper_types((left, right)) @@ -611,17 +626,43 @@ def are_tuples_overlapping( right = adjust_tuple(right, left) or right assert isinstance(left, TupleType), f"Type {left} is not a tuple" assert isinstance(right, TupleType), f"Type {right} is not a tuple" + + # This algorithm works well if only one tuple is variadic, if both are + # variadic we may get rare false negatives for overlapping prefix/suffix. + # Also, this ignores empty unpack case, but it is probably consistent with + # how we handle e.g. empty lists in overload overlaps. + # TODO: write a more robust algorithm for cases where both types are variadic. 
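The rewritten ``are_tuples_overlapping()`` expands a variadic tuple to the length of the other operand before comparing items (see ``expand_tuple_if_possible`` just below), so overlaps involving ``Unpack`` can now be detected. A hedged sketch; requires Python 3.11+ for ``typing.Unpack`` and the reported error is approximate.

.. code-block:: python

    from typing import Unpack, overload

    @overload
    def head(t: tuple[int, Unpack[tuple[str, ...]]]) -> int: ...
    @overload
    def head(t: tuple[int, str]) -> str: ...
    def head(t: tuple[object, ...]) -> object:
        # The second signature's argument is one possible shape of the first, so
        # mypy may now report the pair as overlapping with incompatible return types.
        return t[0]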
+ left_unpack = find_unpack_in_list(left.items) + right_unpack = find_unpack_in_list(right.items) + if left_unpack is not None: + left = expand_tuple_if_possible(left, len(right.items)) + if right_unpack is not None: + right = expand_tuple_if_possible(right, len(left.items)) + if len(left.items) != len(right.items): return False - return all( - is_overlapping_types( - l, - r, - ignore_promotions=ignore_promotions, - prohibit_none_typevar_overlap=prohibit_none_typevar_overlap, - ) - for l, r in zip(left.items, right.items) - ) + return all(is_overlapping(l, r) for l, r in zip(left.items, right.items)) + + +def expand_tuple_if_possible(tup: TupleType, target: int) -> TupleType: + if len(tup.items) > target + 1: + return tup + extra = target + 1 - len(tup.items) + new_items = [] + for it in tup.items: + if not isinstance(it, UnpackType): + new_items.append(it) + continue + unpacked = get_proper_type(it.type) + if isinstance(unpacked, TypeVarTupleType): + instance = unpacked.tuple_fallback + else: + # Nested non-variadic tuples should be normalized at this point. + assert isinstance(unpacked, Instance) + instance = unpacked + assert instance.type.fullname == "builtins.tuple" + new_items.extend([instance.args[0]] * extra) + return tup.copy_modified(items=new_items) def adjust_tuple(left: ProperType, r: ProperType) -> TupleType | None: @@ -1024,8 +1065,9 @@ def default(self, typ: Type) -> ProperType: def meet_similar_callables(t: CallableType, s: CallableType) -> CallableType: - from mypy.join import safe_join + from mypy.join import match_generic_callables, safe_join + t, s = match_generic_callables(t, s) arg_types: list[Type] = [] for i in range(len(t.arg_types)): arg_types.append(safe_join(t.arg_types[i], s.arg_types[i])) diff --git a/mypy/message_registry.py b/mypy/message_registry.py index 3852431f2290f..befacc9e61822 100644 --- a/mypy/message_registry.py +++ b/mypy/message_registry.py @@ -180,6 +180,10 @@ def with_additional_msg(self, info: str) -> ErrorMessage: ) INVALID_UNPACK: Final = "{} cannot be unpacked (must be tuple or TypeVarTuple)" INVALID_UNPACK_POSITION: Final = "Unpack is only valid in a variadic position" +INVALID_PARAM_SPEC_LOCATION: Final = "Invalid location for ParamSpec {}" +INVALID_PARAM_SPEC_LOCATION_NOTE: Final = ( + 'You can use ParamSpec as the first argument to Callable, e.g., "Callable[{}, int]"' +) # TypeVar INCOMPATIBLE_TYPEVAR_VALUE: Final = 'Value of type variable "{}" of {} cannot be {}' @@ -330,3 +334,6 @@ def with_additional_msg(self, info: str) -> ErrorMessage: NARROWED_TYPE_NOT_SUBTYPE: Final = ErrorMessage( "Narrowed type {} is not a subtype of input type {}", codes.NARROWED_TYPE_NOT_SUBTYPE ) +TYPE_VAR_TOO_FEW_CONSTRAINED_TYPES: Final = ErrorMessage( + "Type variable must have at least two constrained types", codes.MISC +) diff --git a/mypy/messages.py b/mypy/messages.py index de079feda048c..62846c536f3d2 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -83,12 +83,14 @@ TypeOfAny, TypeStrVisitor, TypeType, + TypeVarLikeType, TypeVarTupleType, TypeVarType, UnboundType, UninhabitedType, UnionType, UnpackType, + flatten_nested_unions, get_proper_type, get_proper_types, ) @@ -144,6 +146,9 @@ "numbers.Integral", } +MAX_TUPLE_ITEMS = 10 +MAX_UNION_ITEMS = 10 + class MessageBuilder: """Helper class for reporting type checker error messages with parameters. 
@@ -1623,13 +1628,21 @@ def overload_inconsistently_applies_decorator(self, decorator: str, context: Con context, ) - def overloaded_signatures_overlap(self, index1: int, index2: int, context: Context) -> None: + def overloaded_signatures_overlap( + self, index1: int, index2: int, flip_note: bool, context: Context + ) -> None: self.fail( "Overloaded function signatures {} and {} overlap with " "incompatible return types".format(index1, index2), context, code=codes.OVERLOAD_OVERLAP, ) + if flip_note: + self.note( + "Flipping the order of overloads will fix this error", + context, + code=codes.OVERLOAD_OVERLAP, + ) def overloaded_signature_will_never_match( self, index1: int, index2: int, context: Context @@ -2329,7 +2342,7 @@ def try_report_long_tuple_assignment_error( """ if isinstance(subtype, TupleType): if ( - len(subtype.items) > 10 + len(subtype.items) > MAX_TUPLE_ITEMS and isinstance(supertype, Instance) and supertype.type.fullname == "builtins.tuple" ): @@ -2338,7 +2351,7 @@ def try_report_long_tuple_assignment_error( self.generate_incompatible_tuple_error(lhs_types, subtype.items, context, msg) return True elif isinstance(supertype, TupleType) and ( - len(subtype.items) > 10 or len(supertype.items) > 10 + len(subtype.items) > MAX_TUPLE_ITEMS or len(supertype.items) > MAX_TUPLE_ITEMS ): if len(subtype.items) != len(supertype.items): if supertype_label is not None and subtype_label is not None: @@ -2361,7 +2374,7 @@ def try_report_long_tuple_assignment_error( def format_long_tuple_type(self, typ: TupleType) -> str: """Format very long tuple type using an ellipsis notation""" item_cnt = len(typ.items) - if item_cnt > 10: + if item_cnt > MAX_TUPLE_ITEMS: return "{}[{}, {}, ... <{} more items>]".format( "tuple" if self.options.use_lowercase_names() else "Tuple", format_type_bare(typ.items[0], self.options), @@ -2488,11 +2501,21 @@ def format(typ: Type) -> str: def format_list(types: Sequence[Type]) -> str: return ", ".join(format(typ) for typ in types) - def format_union(types: Sequence[Type]) -> str: + def format_union_items(types: Sequence[Type]) -> list[str]: formatted = [format(typ) for typ in types if format(typ) != "None"] + if len(formatted) > MAX_UNION_ITEMS and verbosity == 0: + more = len(formatted) - MAX_UNION_ITEMS // 2 + formatted = formatted[: MAX_UNION_ITEMS // 2] + else: + more = 0 + if more: + formatted.append(f"<{more} more items>") if any(format(typ) == "None" for typ in types): formatted.append("None") - return " | ".join(formatted) + return formatted + + def format_union(types: Sequence[Type]) -> str: + return " | ".join(format_union_items(types)) def format_literal_value(typ: LiteralType) -> str: if typ.is_enum_literal(): @@ -2502,14 +2525,16 @@ def format_literal_value(typ: LiteralType) -> str: return typ.value_repr() if isinstance(typ, TypeAliasType) and typ.is_recursive: - # TODO: find balance here, str(typ) doesn't support custom verbosity, and may be - # too verbose for user messages, OTOH it nicely shows structure of recursive types. - if verbosity < 2: - type_str = typ.alias.name if typ.alias else "" + if typ.alias is None: + type_str = "" + else: + if verbosity >= 2 or (fullnames and typ.alias.fullname in fullnames): + type_str = typ.alias.fullname + else: + type_str = typ.alias.name if typ.args: type_str += f"[{format_list(typ.args)}]" - return type_str - return str(typ) + return type_str # TODO: always mention type alias names in errors. 
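The new ``flip_note`` argument above adds a hint when reordering overloads would resolve an unsafe overlap. A rough sketch of the kind of code that triggers it; messages are approximate.

.. code-block:: python

    from typing import overload

    class Animal: ...
    class Dog(Animal): ...

    @overload
    def describe(x: Animal) -> int: ...
    @overload
    def describe(x: Dog) -> str: ...
    def describe(x: Animal) -> int | str:
        return "dog" if isinstance(x, Dog) else 0

    # Expected (approximate) diagnostics:
    #   error: Overloaded function signatures 1 and 2 overlap with
    #          incompatible return types  [overload-overlap]
    #   note: Flipping the order of overloads will fix this error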
typ = get_proper_type(typ) @@ -2550,9 +2575,15 @@ def format_literal_value(typ: LiteralType) -> str: return f"Unpack[{format(typ.type)}]" elif isinstance(typ, TypeVarType): # This is similar to non-generic instance types. + fullname = scoped_type_var_name(typ) + if verbosity >= 2 or (fullnames and fullname in fullnames): + return fullname return typ.name elif isinstance(typ, TypeVarTupleType): # This is similar to non-generic instance types. + fullname = scoped_type_var_name(typ) + if verbosity >= 2 or (fullnames and fullname in fullnames): + return fullname return typ.name elif isinstance(typ, ParamSpecType): # Concatenate[..., P] @@ -2563,6 +2594,7 @@ def format_literal_value(typ: LiteralType) -> str: return f"[{args}, **{typ.name_with_suffix()}]" else: + # TODO: better disambiguate ParamSpec name clashes. return typ.name_with_suffix() elif isinstance(typ, TupleType): # Prefer the name of the fallback class (if not tuple), as it's more informative. @@ -2587,6 +2619,9 @@ def format_literal_value(typ: LiteralType) -> str: elif isinstance(typ, LiteralType): return f"Literal[{format_literal_value(typ)}]" elif isinstance(typ, UnionType): + typ = get_proper_type(ignore_last_known_values(typ)) + if not isinstance(typ, UnionType): + return format(typ) literal_items, union_items = separate_union_literals(typ) # Coalesce multiple Literal[] members. This also changes output order. @@ -2606,7 +2641,7 @@ def format_literal_value(typ: LiteralType) -> str: return ( f"{literal_str} | {format_union(union_items)}" if options.use_or_syntax() - else f"Union[{format_list(union_items)}, {literal_str}]" + else f"Union[{', '.join(format_union_items(union_items))}, {literal_str}]" ) else: return literal_str @@ -2627,7 +2662,7 @@ def format_literal_value(typ: LiteralType) -> str: s = ( format_union(typ.items) if options.use_or_syntax() - else f"Union[{format_list(typ.items)}]" + else f"Union[{', '.join(format_union_items(typ.items))}]" ) return s elif isinstance(typ, NoneType): @@ -2680,29 +2715,51 @@ def format_literal_value(typ: LiteralType) -> str: return "object" -def collect_all_instances(t: Type) -> list[Instance]: - """Return all instances that `t` contains (including `t`). +def collect_all_named_types(t: Type) -> list[Type]: + """Return all instances/aliases/type variables that `t` contains (including `t`). This is similar to collect_all_inner_types from typeanal but only returns instances and will recurse into fallbacks. 
""" - visitor = CollectAllInstancesQuery() + visitor = CollectAllNamedTypesQuery() t.accept(visitor) - return visitor.instances + return visitor.types -class CollectAllInstancesQuery(TypeTraverserVisitor): +class CollectAllNamedTypesQuery(TypeTraverserVisitor): def __init__(self) -> None: - self.instances: list[Instance] = [] + self.types: list[Type] = [] def visit_instance(self, t: Instance) -> None: - self.instances.append(t) + self.types.append(t) super().visit_instance(t) def visit_type_alias_type(self, t: TypeAliasType) -> None: if t.alias and not t.is_recursive: - t.alias.target.accept(self) - super().visit_type_alias_type(t) + get_proper_type(t).accept(self) + else: + self.types.append(t) + super().visit_type_alias_type(t) + + def visit_type_var(self, t: TypeVarType) -> None: + self.types.append(t) + super().visit_type_var(t) + + def visit_type_var_tuple(self, t: TypeVarTupleType) -> None: + self.types.append(t) + super().visit_type_var_tuple(t) + + def visit_param_spec(self, t: ParamSpecType) -> None: + self.types.append(t) + super().visit_param_spec(t) + + +def scoped_type_var_name(t: TypeVarLikeType) -> str: + if not t.id.namespace: + return t.name + # TODO: support rare cases when both TypeVar name and namespace suffix coincide. + *_, suffix = t.id.namespace.split(".") + return f"{t.name}@{suffix}" def find_type_overlaps(*types: Type) -> set[str]: @@ -2713,8 +2770,14 @@ def find_type_overlaps(*types: Type) -> set[str]: """ d: dict[str, set[str]] = {} for type in types: - for inst in collect_all_instances(type): - d.setdefault(inst.type.name, set()).add(inst.type.fullname) + for t in collect_all_named_types(type): + if isinstance(t, ProperType) and isinstance(t, Instance): + d.setdefault(t.type.name, set()).add(t.type.fullname) + elif isinstance(t, TypeAliasType) and t.alias: + d.setdefault(t.alias.name, set()).add(t.alias.fullname) + else: + assert isinstance(t, TypeVarLikeType) + d.setdefault(t.name, set()).add(scoped_type_var_name(t)) for shortname in d.keys(): if f"typing.{shortname}" in TYPES_FOR_UNIMPORTED_HINTS: d[shortname].add(f"typing.{shortname}") @@ -2732,7 +2795,7 @@ def format_type( """ Convert a type to a relatively short string suitable for error messages. - `verbosity` is a coarse grained control on the verbosity of the type + `verbosity` is a coarse-grained control on the verbosity of the type This function returns a string appropriate for unmodified use in error messages; this means that it will be quoted in most cases. If @@ -2748,7 +2811,7 @@ def format_type_bare( """ Convert a type to a relatively short string suitable for error messages. - `verbosity` is a coarse grained control on the verbosity of the type + `verbosity` is a coarse-grained control on the verbosity of the type `fullnames` specifies a set of names that should be printed in full This function will return an unquoted string. 
If a caller doesn't need to @@ -2882,10 +2945,10 @@ def [T <: int] f(self, x: int, y: T) -> None isinstance(upper_bound, Instance) and upper_bound.type.fullname != "builtins.object" ): - tvars.append(f"{tvar.name} <: {format_type_bare(upper_bound, options)}") + tvars.append(f"{tvar.name}: {format_type_bare(upper_bound, options)}") elif tvar.values: tvars.append( - "{} in ({})".format( + "{}: ({})".format( tvar.name, ", ".join([format_type_bare(tp, options) for tp in tvar.values]), ) @@ -3136,6 +3199,23 @@ def append_invariance_notes( return notes +def append_union_note( + notes: list[str], arg_type: UnionType, expected_type: UnionType, options: Options +) -> list[str]: + """Point to specific union item(s) that may cause failure in subtype check.""" + non_matching = [] + items = flatten_nested_unions(arg_type.items) + if len(items) < MAX_UNION_ITEMS: + return notes + for item in items: + if not is_subtype(item, expected_type): + non_matching.append(item) + if non_matching: + types = ", ".join([format_type(typ, options) for typ in non_matching]) + notes.append(f"Item{plural_s(non_matching)} in the first union not in the second: {types}") + return notes + + def append_numbers_notes( notes: list[str], arg_type: Instance, expected_type: Instance ) -> list[str]: @@ -3189,3 +3269,23 @@ def format_key_list(keys: list[str], *, short: bool = False) -> str: return f"{td}key {formatted_keys[0]}" else: return f"{td}keys ({', '.join(formatted_keys)})" + + +def ignore_last_known_values(t: UnionType) -> Type: + """This will avoid types like str | str in error messages. + + last_known_values are kept during union simplification, but may cause + weird formatting for e.g. tuples of literals. + """ + union_items: list[Type] = [] + seen_instances = set() + for item in t.items: + if isinstance(item, ProperType) and isinstance(item, Instance): + erased = item.copy_modified(last_known_value=None) + if erased in seen_instances: + continue + seen_instances.add(erased) + union_items.append(erased) + else: + union_items.append(item) + return UnionType.make_union(union_items, t.line, t.column) diff --git a/mypy/nodes.py b/mypy/nodes.py index dbde3ddf4f1b5..d215bcfce098a 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -175,6 +175,8 @@ def get_nongen_builtins(python_version: tuple[int, int]) -> dict[str, str]: "typing_extensions.runtime_checkable", ) +LAMBDA_NAME: Final = "" + class Node(Context): """Common base class for all non-type parse tree nodes.""" @@ -1653,10 +1655,10 @@ class TypeAliasStmt(Statement): name: NameExpr type_args: list[TypeParam] - value: Expression # Will get translated into a type + value: LambdaExpr # Return value will get translated into a type invalid_recursive_alias: bool - def __init__(self, name: NameExpr, type_args: list[TypeParam], value: Expression) -> None: + def __init__(self, name: NameExpr, type_args: list[TypeParam], value: LambdaExpr) -> None: super().__init__() self.name = name self.type_args = type_args @@ -2262,7 +2264,7 @@ class LambdaExpr(FuncItem, Expression): @property def name(self) -> str: - return "" + return LAMBDA_NAME def expr(self) -> Expression: """Return the expression (the body) of the lambda.""" @@ -2535,8 +2537,9 @@ def __init__( default: mypy.types.Type, variance: int = INVARIANT, is_new_style: bool = False, + line: int = -1, ) -> None: - super().__init__() + super().__init__(line=line) self._name = name self._fullname = fullname self.upper_bound = upper_bound @@ -2582,8 +2585,9 @@ def __init__( default: mypy.types.Type, variance: int = INVARIANT, 
is_new_style: bool = False, + line: int = -1, ) -> None: - super().__init__(name, fullname, upper_bound, default, variance, is_new_style) + super().__init__(name, fullname, upper_bound, default, variance, is_new_style, line=line) self.values = values def accept(self, visitor: ExpressionVisitor[T]) -> T: @@ -2661,8 +2665,9 @@ def __init__( default: mypy.types.Type, variance: int = INVARIANT, is_new_style: bool = False, + line: int = -1, ) -> None: - super().__init__(name, fullname, upper_bound, default, variance, is_new_style) + super().__init__(name, fullname, upper_bound, default, variance, is_new_style, line=line) self.tuple_fallback = tuple_fallback def accept(self, visitor: ExpressionVisitor[T]) -> T: @@ -3578,6 +3583,7 @@ def f(x: B[T]) -> T: ... # without T, Any would be used here "_is_recursive", "eager", "tvar_tuple_index", + "python_3_12_type_alias", ) __match_args__ = ("name", "target", "alias_tvars", "no_args") @@ -3593,6 +3599,7 @@ def __init__( no_args: bool = False, normalized: bool = False, eager: bool = False, + python_3_12_type_alias: bool = False, ) -> None: self._fullname = fullname self.target = target @@ -3605,6 +3612,7 @@ def __init__( # it is the cached value. self._is_recursive: bool | None = None self.eager = eager + self.python_3_12_type_alias = python_3_12_type_alias self.tvar_tuple_index = None for i, t in enumerate(alias_tvars): if isinstance(t, mypy.types.TypeVarTupleType): @@ -3675,6 +3683,7 @@ def serialize(self) -> JsonDict: "normalized": self.normalized, "line": self.line, "column": self.column, + "python_3_12_type_alias": self.python_3_12_type_alias, } return data @@ -3692,6 +3701,7 @@ def deserialize(cls, data: JsonDict) -> TypeAlias: normalized = data["normalized"] line = data["line"] column = data["column"] + python_3_12_type_alias = data["python_3_12_type_alias"] return cls( target, fullname, @@ -3700,6 +3710,7 @@ def deserialize(cls, data: JsonDict) -> TypeAlias: alias_tvars=cast(List[mypy.types.TypeVarLikeType], alias_tvars), no_args=no_args, normalized=normalized, + python_3_12_type_alias=python_3_12_type_alias, ) diff --git a/mypy/plugin.py b/mypy/plugin.py index 38016191de8f6..858795addb7f8 100644 --- a/mypy/plugin.py +++ b/mypy/plugin.py @@ -328,7 +328,6 @@ def anal_type( allow_tuple_literal: bool = False, allow_unbound_tvars: bool = False, report_invalid_types: bool = True, - third_pass: bool = False, ) -> Type | None: """Analyze an unbound type. diff --git a/mypy/plugins/attrs.py b/mypy/plugins/attrs.py index 83f685f57a16d..db976385ee56e 100644 --- a/mypy/plugins/attrs.py +++ b/mypy/plugins/attrs.py @@ -69,6 +69,7 @@ Type, TypeOfAny, TypeType, + TypeVarId, TypeVarType, UninhabitedType, UnionType, @@ -807,25 +808,25 @@ def _add_order(ctx: mypy.plugin.ClassDefContext, adder: MethodAdder) -> None: # AT = TypeVar('AT') # def __lt__(self: AT, other: AT) -> bool # This way comparisons with subclasses will work correctly. + fullname = f"{ctx.cls.info.fullname}.{SELF_TVAR_NAME}" tvd = TypeVarType( SELF_TVAR_NAME, - ctx.cls.info.fullname + "." + SELF_TVAR_NAME, - id=-1, + fullname, + # Namespace is patched per-method below. + id=TypeVarId(-1, namespace=""), values=[], upper_bound=object_type, default=AnyType(TypeOfAny.from_omitted_generics), ) self_tvar_expr = TypeVarExpr( - SELF_TVAR_NAME, - ctx.cls.info.fullname + "." 
+ SELF_TVAR_NAME, - [], - object_type, - AnyType(TypeOfAny.from_omitted_generics), + SELF_TVAR_NAME, fullname, [], object_type, AnyType(TypeOfAny.from_omitted_generics) ) ctx.cls.info.names[SELF_TVAR_NAME] = SymbolTableNode(MDEF, self_tvar_expr) - args = [Argument(Var("other", tvd), tvd, None, ARG_POS)] for method in ["__lt__", "__le__", "__gt__", "__ge__"]: + namespace = f"{ctx.cls.info.fullname}.{method}" + tvd = tvd.copy_modified(id=TypeVarId(tvd.id.raw_id, namespace=namespace)) + args = [Argument(Var("other", tvd), tvd, None, ARG_POS)] adder.add_method(method, args, bool_type, self_type=tvd, tvd=tvd) diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py index dead512a22023..dd2eceab217f7 100644 --- a/mypy/plugins/dataclasses.py +++ b/mypy/plugins/dataclasses.py @@ -65,6 +65,7 @@ TupleType, Type, TypeOfAny, + TypeVarId, TypeVarType, UninhabitedType, UnionType, @@ -314,8 +315,8 @@ def transform(self) -> bool: obj_type = self._api.named_type("builtins.object") order_tvar_def = TypeVarType( SELF_TVAR_NAME, - info.fullname + "." + SELF_TVAR_NAME, - id=-1, + f"{info.fullname}.{SELF_TVAR_NAME}", + id=TypeVarId(-1, namespace=f"{info.fullname}.{method_name}"), values=[], upper_bound=obj_type, default=AnyType(TypeOfAny.from_omitted_generics), diff --git a/mypy/plugins/default.py b/mypy/plugins/default.py index 3ad301a15f6cf..5139b9b82289a 100644 --- a/mypy/plugins/default.py +++ b/mypy/plugins/default.py @@ -41,7 +41,7 @@ class DefaultPlugin(Plugin): """Type checker plugin that is enabled by default.""" def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None: - from mypy.plugins import ctypes, singledispatch + from mypy.plugins import ctypes, enums, singledispatch if fullname == "_ctypes.Array": return ctypes.array_constructor_callback @@ -51,6 +51,8 @@ def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] import mypy.plugins.functools return mypy.plugins.functools.partial_new_callback + elif fullname == "enum.member": + return enums.enum_member_callback return None diff --git a/mypy/plugins/enums.py b/mypy/plugins/enums.py index 83350fe2fe112..816241fa6e9a6 100644 --- a/mypy/plugins/enums.py +++ b/mypy/plugins/enums.py @@ -20,7 +20,15 @@ from mypy.semanal_enum import ENUM_BASES from mypy.subtypes import is_equivalent from mypy.typeops import fixup_partial_type, make_simplified_union -from mypy.types import CallableType, Instance, LiteralType, ProperType, Type, get_proper_type +from mypy.types import ( + CallableType, + Instance, + LiteralType, + ProperType, + Type, + get_proper_type, + is_named_instance, +) ENUM_NAME_ACCESS: Final = {f"{prefix}.name" for prefix in ENUM_BASES} | { f"{prefix}._name_" for prefix in ENUM_BASES @@ -79,6 +87,8 @@ def _infer_value_type_with_auto_fallback( return None proper_type = get_proper_type(fixup_partial_type(proper_type)) if not (isinstance(proper_type, Instance) and proper_type.type.fullname == "enum.auto"): + if is_named_instance(proper_type, "enum.member") and proper_type.args: + return proper_type.args[0] return proper_type assert isinstance(ctx.type, Instance), "An incorrect ctx.type was passed." 
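Together with ``enum_member_callback`` defined just below (and registered in the default plugin above), the ``enum.member`` unwrapping here improves value inference for explicitly declared members. A hedged sketch; Python 3.11+ for ``enum.member``, revealed types approximate.

.. code-block:: python

    import enum
    from typing import reveal_type

    class Example(enum.Enum):
        A = enum.member(1)  # explicitly declared member

    reveal_type(Example.A)        # still a member of Example
    reveal_type(Example.A.value)  # now inferred from the wrapped value (roughly "int"
                                  # or a literal) instead of "member[int]"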
info = ctx.type.type @@ -118,6 +128,22 @@ def _implements_new(info: TypeInfo) -> bool: return type_with_new.fullname not in ("enum.Enum", "enum.IntEnum", "enum.StrEnum") +def enum_member_callback(ctx: mypy.plugin.FunctionContext) -> Type: + """By default `member(1)` will be infered as `member[int]`, + we want to improve the inference to be `Literal[1]` here.""" + if ctx.arg_types or ctx.arg_types[0]: + arg = get_proper_type(ctx.arg_types[0][0]) + proper_return = get_proper_type(ctx.default_return_type) + if ( + isinstance(arg, Instance) + and arg.last_known_value + and isinstance(proper_return, Instance) + and len(proper_return.args) == 1 + ): + return proper_return.copy_modified(args=[arg]) + return ctx.default_return_type + + def enum_value_callback(ctx: mypy.plugin.AttributeContext) -> Type: """This plugin refines the 'value' attribute in enums to refer to the original underlying value. For example, suppose we have the @@ -159,7 +185,7 @@ class SomeEnum: stnodes = (info.get(name) for name in info.names) - # Enums _can_ have methods and instance attributes. + # Enums _can_ have methods, instance attributes, and `nonmember`s. # Omit methods and attributes created by assigning to self.* # for our value inference. node_types = ( @@ -170,7 +196,8 @@ class SomeEnum: proper_types = [ _infer_value_type_with_auto_fallback(ctx, t) for t in node_types - if t is None or not isinstance(t, CallableType) + if t is None + or (not isinstance(t, CallableType) and not is_named_instance(t, "enum.nonmember")) ] underlying_type = _first(proper_types) if underlying_type is None: diff --git a/mypy/plugins/functools.py b/mypy/plugins/functools.py index 7ff30ab2d4f4b..35e3f70c0620c 100644 --- a/mypy/plugins/functools.py +++ b/mypy/plugins/functools.py @@ -6,8 +6,9 @@ import mypy.checker import mypy.plugin +import mypy.semanal from mypy.argmap import map_actuals_to_formals -from mypy.nodes import ARG_POS, ARG_STAR2, ArgKind, Argument, FuncItem, Var +from mypy.nodes import ARG_POS, ARG_STAR2, ArgKind, Argument, CallExpr, FuncItem, Var from mypy.plugins.common import add_method_to_class from mypy.types import ( AnyType, @@ -17,7 +18,6 @@ Type, TypeOfAny, UnboundType, - UninhabitedType, get_proper_type, ) @@ -25,6 +25,8 @@ _ORDERING_METHODS: Final = {"__lt__", "__le__", "__gt__", "__ge__"} +PARTIAL: Final = "functools.partial" + class _MethodInfo(NamedTuple): is_static: bool @@ -132,6 +134,23 @@ def partial_new_callback(ctx: mypy.plugin.FunctionContext) -> Type: if fn_type is None: return ctx.default_return_type + # We must normalize from the start to have coherent view together with TypeChecker. + fn_type = fn_type.with_unpacked_kwargs().with_normalized_var_args() + + last_context = ctx.api.type_context[-1] + if not fn_type.is_type_obj(): + # We wrap the return type to get use of a possible type context provided by caller. + # We cannot do this in case of class objects, since otherwise the plugin may get + # falsely triggered when evaluating the constructed call itself. + ret_type: Type = ctx.api.named_generic_type(PARTIAL, [fn_type.ret_type]) + wrapped_return = True + else: + ret_type = fn_type.ret_type + # Instead, for class objects we ignore any type context to avoid spurious errors, + # since the type context will be partial[X] etc., not X. 
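For context, a rough sketch of the ``functools.partial`` special-casing these hunks refine; exact inferred types and messages may differ.

.. code-block:: python

    import functools
    from typing import reveal_type

    def fmt(value: int, sep: str = ",") -> str:
        return f"{value}{sep}"

    p = functools.partial(fmt, 1)
    reveal_type(p)  # roughly "functools.partial[str]"
    p(sep=";")      # OK
    p(sep=0)        # error: incompatible type for "sep" (approximate wording)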
+ ctx.api.type_context[-1] = None + wrapped_return = False + defaulted = fn_type.copy_modified( arg_kinds=[ ( @@ -141,28 +160,65 @@ def partial_new_callback(ctx: mypy.plugin.FunctionContext) -> Type: ) for k in fn_type.arg_kinds ], + ret_type=ret_type, special_sig="partial", ) if defaulted.line < 0: # Make up a line number if we don't have one defaulted.set_line(ctx.default_return_type) - actual_args = [a for param in ctx.args[1:] for a in param] - actual_arg_kinds = [a for param in ctx.arg_kinds[1:] for a in param] - actual_arg_names = [a for param in ctx.arg_names[1:] for a in param] - actual_types = [a for param in ctx.arg_types[1:] for a in param] + # Flatten actual to formal mapping, since this is what check_call() expects. + actual_args = [] + actual_arg_kinds = [] + actual_arg_names = [] + actual_types = [] + seen_args = set() + for i, param in enumerate(ctx.args[1:], start=1): + for j, a in enumerate(param): + if a in seen_args: + # Same actual arg can map to multiple formals, but we need to include + # each one only once. + continue + # Here we rely on the fact that expressions are essentially immutable, so + # they can be compared by identity. + seen_args.add(a) + actual_args.append(a) + actual_arg_kinds.append(ctx.arg_kinds[i][j]) + actual_arg_names.append(ctx.arg_names[i][j]) + actual_types.append(ctx.arg_types[i][j]) + + # Create a valid context for various ad-hoc inspections in check_call(). + call_expr = CallExpr( + callee=ctx.args[0][0], + args=actual_args, + arg_kinds=actual_arg_kinds, + arg_names=actual_arg_names, + analyzed=ctx.context.analyzed if isinstance(ctx.context, CallExpr) else None, + ) + call_expr.set_line(ctx.context) _, bound = ctx.api.expr_checker.check_call( callee=defaulted, args=actual_args, arg_kinds=actual_arg_kinds, arg_names=actual_arg_names, - context=defaulted, + context=call_expr, ) + if not wrapped_return: + # Restore previously ignored context. + ctx.api.type_context[-1] = last_context + bound = get_proper_type(bound) if not isinstance(bound, CallableType): return ctx.default_return_type + if wrapped_return: + # Reverse the wrapping we did above. + ret_type = get_proper_type(bound.ret_type) + if not isinstance(ret_type, Instance) or ret_type.type.fullname != PARTIAL: + return ctx.default_return_type + bound = bound.copy_modified(ret_type=ret_type.args[0]) + formal_to_actual = map_actuals_to_formals( actual_kinds=actual_arg_kinds, actual_names=actual_arg_names, @@ -179,7 +235,7 @@ def partial_new_callback(ctx: mypy.plugin.FunctionContext) -> Type: for i, actuals in enumerate(formal_to_actual): if len(bound.arg_types) == len(fn_type.arg_types): arg_type = bound.arg_types[i] - if isinstance(get_proper_type(arg_type), UninhabitedType): + if not mypy.checker.is_valid_inferred_type(arg_type): arg_type = fn_type.arg_types[i] # bit of a hack else: # TODO: I assume that bound and fn_type have the same arguments. 
It appears this isn't @@ -201,7 +257,7 @@ def partial_new_callback(ctx: mypy.plugin.FunctionContext) -> Type: partial_names.append(fn_type.arg_names[i]) ret_type = bound.ret_type - if isinstance(get_proper_type(ret_type), UninhabitedType): + if not mypy.checker.is_valid_inferred_type(ret_type): ret_type = fn_type.ret_type # same kind of hack as above partially_applied = fn_type.copy_modified( @@ -215,7 +271,7 @@ def partial_new_callback(ctx: mypy.plugin.FunctionContext) -> Type: ), ) - ret = ctx.api.named_generic_type("functools.partial", [ret_type]) + ret = ctx.api.named_generic_type(PARTIAL, [ret_type]) ret = ret.copy_with_extra_attr("__mypy_partial", partially_applied) return ret @@ -225,7 +281,7 @@ def partial_call_callback(ctx: mypy.plugin.MethodContext) -> Type: if ( not isinstance(ctx.api, mypy.checker.TypeChecker) # use internals or not isinstance(ctx.type, Instance) - or ctx.type.type.fullname != "functools.partial" + or ctx.type.type.fullname != PARTIAL or not ctx.type.extra_attrs or "__mypy_partial" not in ctx.type.extra_attrs.attrs ): diff --git a/mypy/semanal.py b/mypy/semanal.py index 44db7ddf5618e..f857c3e733819 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -59,6 +59,7 @@ from mypy.errorcodes import PROPERTY_DECORATOR, ErrorCode from mypy.errors import Errors, report_internal_error from mypy.exprtotype import TypeTranslationError, expr_to_unanalyzed_type +from mypy.message_registry import ErrorMessage from mypy.messages import ( SUGGESTED_TEST_FIXTURES, TYPES_FOR_UNIMPORTED_HINTS, @@ -279,6 +280,7 @@ TypedDictType, TypeOfAny, TypeType, + TypeVarId, TypeVarLikeType, TypeVarTupleType, TypeVarType, @@ -478,6 +480,9 @@ def __init__( # new uses of this, as this may cause leaking `UnboundType`s to type checking. self.allow_unbound_tvars = False + # Used to pass information about current overload index to visit_func_def(). + self.current_overload_item: int | None = None + # mypyc doesn't properly handle implementing an abstractproperty # with a regular attribute so we make them properties @property @@ -868,6 +873,11 @@ def visit_func_def(self, defn: FuncDef) -> None: with self.scope.function_scope(defn): self.analyze_func_def(defn) + def function_fullname(self, fullname: str) -> str: + if self.current_overload_item is None: + return fullname + return f"{fullname}#{self.current_overload_item}" + def analyze_func_def(self, defn: FuncDef) -> None: if self.push_type_args(defn.type_args, defn) is None: self.defer(defn) @@ -894,7 +904,8 @@ def analyze_func_def(self, defn: FuncDef) -> None: self.prepare_method_signature(defn, self.type, has_self_type) # Analyze function signature - with self.tvar_scope_frame(self.tvar_scope.method_frame()): + fullname = self.function_fullname(defn.fullname) + with self.tvar_scope_frame(self.tvar_scope.method_frame(fullname)): if defn.type: self.check_classvar_in_signature(defn.type) assert isinstance(defn.type, CallableType) @@ -902,11 +913,11 @@ def analyze_func_def(self, defn: FuncDef) -> None: # class-level imported names and type variables are in scope. analyzer = self.type_analyzer() tag = self.track_incomplete_refs() - result = analyzer.visit_callable_type(defn.type, nested=False) + result = analyzer.visit_callable_type(defn.type, nested=False, namespace=fullname) # Don't store not ready types (including placeholders). 
if self.found_incomplete_ref(tag) or has_placeholder(result): self.defer(defn) - # TODO: pop type args + self.pop_type_args(defn.type_args) return assert isinstance(result, ProperType) if isinstance(result, CallableType): @@ -1114,7 +1125,8 @@ def update_function_type_variables(self, fun_type: CallableType, defn: FuncItem) if defn is generic. Return True, if the signature contains typing.Self type, or False otherwise. """ - with self.tvar_scope_frame(self.tvar_scope.method_frame()): + fullname = self.function_fullname(defn.fullname) + with self.tvar_scope_frame(self.tvar_scope.method_frame(fullname)): a = self.type_analyzer() fun_type.variables, has_self_type = a.bind_function_type_variables(fun_type, defn) if has_self_type and self.type is not None: @@ -1152,7 +1164,7 @@ def setup_self_type(self) -> None: info.self_type = TypeVarType( "Self", f"{info.fullname}.Self", - id=0, + id=TypeVarId(0), # 0 is a special value for self-types. values=[], upper_bound=fill_typevars(info), default=AnyType(TypeOfAny.from_omitted_generics), @@ -1172,6 +1184,14 @@ def visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None: with self.scope.function_scope(defn): self.analyze_overloaded_func_def(defn) + @contextmanager + def overload_item_set(self, item: int | None) -> Iterator[None]: + self.current_overload_item = item + try: + yield + finally: + self.current_overload_item = None + def analyze_overloaded_func_def(self, defn: OverloadedFuncDef) -> None: # OverloadedFuncDef refers to any legitimate situation where you have # more than one declaration for the same function in a row. This occurs @@ -1184,7 +1204,8 @@ def analyze_overloaded_func_def(self, defn: OverloadedFuncDef) -> None: first_item = defn.items[0] first_item.is_overload = True - first_item.accept(self) + with self.overload_item_set(0): + first_item.accept(self) if isinstance(first_item, Decorator) and first_item.func.is_property: # This is a property. @@ -1269,7 +1290,8 @@ def analyze_overload_sigs_and_impl( if i != 0: # Assume that the first item was already visited item.is_overload = True - item.accept(self) + with self.overload_item_set(i if i < len(defn.items) - 1 else None): + item.accept(self) # TODO: support decorated overloaded functions properly if isinstance(item, Decorator): callable = function_type(item.func, self.named_type("builtins.function")) @@ -1441,7 +1463,8 @@ def add_function_to_symbol_table(self, func: FuncDef | OverloadedFuncDef) -> Non self.add_symbol(func.name, func, func) def analyze_arg_initializers(self, defn: FuncItem) -> None: - with self.tvar_scope_frame(self.tvar_scope.method_frame()): + fullname = self.function_fullname(defn.fullname) + with self.tvar_scope_frame(self.tvar_scope.method_frame(fullname)): # Analyze default arguments for arg in defn.arguments: if arg.initializer: @@ -1449,7 +1472,8 @@ def analyze_arg_initializers(self, defn: FuncItem) -> None: def analyze_function_body(self, defn: FuncItem) -> None: is_method = self.is_class_scope() - with self.tvar_scope_frame(self.tvar_scope.method_frame()): + fullname = self.function_fullname(defn.fullname) + with self.tvar_scope_frame(self.tvar_scope.method_frame(fullname)): # Bind the type variables again to visit the body. 
if defn.type: a = self.type_analyzer() @@ -1686,7 +1710,7 @@ def push_type_args( self.scope_stack.append(SCOPE_ANNOTATION) tvs: list[tuple[str, TypeVarLikeExpr]] = [] for p in type_args: - tv = self.analyze_type_param(p) + tv = self.analyze_type_param(p, context) if tv is None: return None tvs.append((p.name, tv)) @@ -1709,22 +1733,30 @@ def is_defined_type_param(self, name: str) -> bool: return True return False - def analyze_type_param(self, type_param: TypeParam) -> TypeVarLikeExpr | None: + def analyze_type_param( + self, type_param: TypeParam, context: Context + ) -> TypeVarLikeExpr | None: fullname = self.qualified_name(type_param.name) if type_param.upper_bound: - upper_bound = self.anal_type(type_param.upper_bound) + upper_bound = self.anal_type(type_param.upper_bound, allow_placeholder=True) + # TODO: we should validate the upper bound is valid for a given kind. if upper_bound is None: - return None + # This and below copies special-casing for old-style type variables, that + # is equally necessary for new-style classes to break a vicious circle. + upper_bound = PlaceholderType(None, [], context.line) else: - upper_bound = self.named_type("builtins.object") + if type_param.kind == TYPE_VAR_TUPLE_KIND: + upper_bound = self.named_type("builtins.tuple", [self.object_type()]) + else: + upper_bound = self.object_type() default = AnyType(TypeOfAny.from_omitted_generics) if type_param.kind == TYPE_VAR_KIND: values = [] if type_param.values: for value in type_param.values: - analyzed = self.anal_type(value) + analyzed = self.anal_type(value, allow_placeholder=True) if analyzed is None: - return None + analyzed = PlaceholderType(None, [], context.line) values.append(analyzed) return TypeVarExpr( name=type_param.name, @@ -1734,6 +1766,7 @@ def analyze_type_param(self, type_param: TypeParam) -> TypeVarLikeExpr | None: default=default, variance=VARIANCE_NOT_READY, is_new_style=True, + line=context.line, ) elif type_param.kind == PARAM_SPEC_KIND: return ParamSpecExpr( @@ -1742,6 +1775,7 @@ def analyze_type_param(self, type_param: TypeParam) -> TypeVarLikeExpr | None: upper_bound=upper_bound, default=default, is_new_style=True, + line=context.line, ) else: assert type_param.kind == TYPE_VAR_TUPLE_KIND @@ -1749,11 +1783,11 @@ def analyze_type_param(self, type_param: TypeParam) -> TypeVarLikeExpr | None: return TypeVarTupleExpr( name=type_param.name, fullname=fullname, - # Upper bound for *Ts is *tuple[object, ...], it can never be object. 
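One user-visible effect of the ``is_class_scope()`` guard added above: ``Final`` is rejected only for protocol members, not for locals inside a protocol's methods. A hedged sketch:

.. code-block:: python

    from typing import Final, Protocol

    class Config(Protocol):
        version: Final = 1      # error: Protocol member cannot be final (unchanged)

        def build(self) -> int:
            limit: Final = 10   # previously this could be flagged too; now accepted
            return limit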
- upper_bound=tuple_fallback.copy_modified(), + upper_bound=upper_bound, tuple_fallback=tuple_fallback, default=default, is_new_style=True, + line=context.line, ) def pop_type_args(self, type_args: list[TypeParam] | None) -> None: @@ -3505,7 +3539,8 @@ def unwrap_final(self, s: AssignmentStmt) -> bool: if self.loop_depth[-1] > 0: self.fail("Cannot use Final inside a loop", s) if self.type and self.type.is_protocol: - self.msg.protocol_members_cant_be_final(s) + if self.is_class_scope(): + self.msg.protocol_members_cant_be_final(s) if ( isinstance(s.rvalue, TempNode) and s.rvalue.no_rhs @@ -3736,6 +3771,10 @@ def analyze_alias( last_tvar_name_with_default = tvar_def.name tvar_defs.append(tvar_def) + if python_3_12_type_alias: + with self.allow_unbound_tvars_set(): + rvalue.accept(self) + analyzed, depends_on = analyze_type_alias( typ, self, @@ -3922,13 +3961,16 @@ def check_and_set_up_type_alias(self, s: AssignmentStmt) -> bool: alias_tvars=alias_tvars, no_args=no_args, eager=eager, + python_3_12_type_alias=pep_695, ) if isinstance(s.rvalue, (IndexExpr, CallExpr, OpExpr)) and ( not isinstance(rvalue, OpExpr) or (self.options.python_version >= (3, 10) or self.is_stub_file) ): # Note: CallExpr is for "void = type(None)" and OpExpr is for "X | Y" union syntax. - s.rvalue.analyzed = TypeAliasExpr(alias_node) + if not isinstance(s.rvalue.analyzed, TypeAliasExpr): + # Any existing node will be updated in-place below. + s.rvalue.analyzed = TypeAliasExpr(alias_node) s.rvalue.analyzed.line = s.line # we use the column from resulting target, to get better location for errors s.rvalue.analyzed.column = res.column @@ -4586,7 +4628,7 @@ def process_typevar_parameters( self.fail("TypeVar cannot be both covariant and contravariant", context) return None elif num_values == 1: - self.fail("TypeVar cannot have only a single constraint", context) + self.fail(message_registry.TYPE_VAR_TOO_FEW_CONSTRAINED_TYPES, context) return None elif covariant: variance = COVARIANT @@ -5316,10 +5358,18 @@ def visit_type_alias_stmt(self, s: TypeAliasStmt) -> None: all_type_params_names = [p.name for p in s.type_args] try: + existing = self.current_symbol_table().get(s.name.name) + if existing and not ( + isinstance(existing.node, TypeAlias) + or (isinstance(existing.node, PlaceholderNode) and existing.node.line == s.line) + ): + self.already_defined(s.name.name, s, existing, "Name") + return + tag = self.track_incomplete_refs() res, alias_tvars, depends_on, qualified_tvars, empty_tuple_index = self.analyze_alias( s.name.name, - s.value, + s.value.expr(), allow_placeholder=True, declared_type_vars=type_params, all_declared_type_params_names=all_type_params_names, @@ -5368,9 +5418,9 @@ def visit_type_alias_stmt(self, s: TypeAliasStmt) -> None: alias_tvars=alias_tvars, no_args=False, eager=eager, + python_3_12_type_alias=True, ) - existing = self.current_symbol_table().get(s.name.name) if ( existing and isinstance(existing.node, (PlaceholderNode, TypeAlias)) @@ -5402,6 +5452,7 @@ def visit_type_alias_stmt(self, s: TypeAliasStmt) -> None: current_node = existing.node if existing else alias_node assert isinstance(current_node, TypeAlias) self.disable_invalid_recursive_aliases(s, current_node, s.value) + s.name.accept(self) finally: self.pop_type_args(s.type_args) @@ -5416,7 +5467,11 @@ def visit_name_expr(self, expr: NameExpr) -> None: def bind_name_expr(self, expr: NameExpr, sym: SymbolTableNode) -> None: """Bind name expression to a symbol table node.""" - if isinstance(sym.node, TypeVarExpr) and 
self.tvar_scope.get_binding(sym): + if ( + isinstance(sym.node, TypeVarExpr) + and self.tvar_scope.get_binding(sym) + and not self.allow_unbound_tvars + ): self.fail(f'"{expr.name}" is a type variable and only valid in type context', expr) elif isinstance(sym.node, PlaceholderNode): self.process_placeholder(expr.name, "name", expr) @@ -6994,7 +7049,7 @@ def in_checked_function(self) -> bool: def fail( self, - msg: str, + msg: str | ErrorMessage, ctx: Context, serious: bool = False, *, @@ -7005,6 +7060,10 @@ def fail( return # In case it's a bug and we don't really have context assert ctx is not None, msg + if isinstance(msg, ErrorMessage): + if code is None: + code = msg.code + msg = msg.value self.errors.report(ctx.line, ctx.column, msg, blocker=blocker, code=code) def note(self, msg: str, ctx: Context, code: ErrorCode | None = None) -> None: @@ -7135,7 +7194,6 @@ def anal_type( report_invalid_types: bool = True, prohibit_self_type: str | None = None, allow_type_any: bool = False, - third_pass: bool = False, ) -> Type | None: """Semantically analyze a type. @@ -7143,8 +7201,6 @@ def anal_type( typ: Type to analyze (if already analyzed, this is a no-op) allow_placeholder: If True, may return PlaceholderType if encountering an incomplete definition - third_pass: Unused; only for compatibility with old semantic - analyzer Return None only if some part of the type couldn't be bound *and* it referred to an incomplete namespace or definition. In this case also diff --git a/mypy/semanal_namedtuple.py b/mypy/semanal_namedtuple.py index 753deafe103b2..768dd265b3383 100644 --- a/mypy/semanal_namedtuple.py +++ b/mypy/semanal_namedtuple.py @@ -62,6 +62,7 @@ Type, TypeOfAny, TypeType, + TypeVarId, TypeVarLikeType, TypeVarType, UnboundType, @@ -569,27 +570,33 @@ def add_field( add_field(Var("__match_args__", match_args_type), is_initialized_in_class=True) assert info.tuple_type is not None # Set by update_tuple_type() above. - tvd = TypeVarType( + shared_self_type = TypeVarType( name=SELF_TVAR_NAME, - fullname=info.fullname + "." + SELF_TVAR_NAME, + fullname=f"{info.fullname}.{SELF_TVAR_NAME}", + # Namespace is patched per-method below. id=self.api.tvar_scope.new_unique_func_id(), values=[], upper_bound=info.tuple_type, default=AnyType(TypeOfAny.from_omitted_generics), ) - selftype = tvd def add_method( funcname: str, - ret: Type, + ret: Type | None, # None means use (patched) self-type args: list[Argument], is_classmethod: bool = False, is_new: bool = False, ) -> None: + fullname = f"{info.fullname}.{funcname}" + self_type = shared_self_type.copy_modified( + id=TypeVarId(shared_self_type.id.raw_id, namespace=fullname) + ) + if ret is None: + ret = self_type if is_classmethod or is_new: - first = [Argument(Var("_cls"), TypeType.make_normalized(selftype), None, ARG_POS)] + first = [Argument(Var("_cls"), TypeType.make_normalized(self_type), None, ARG_POS)] else: - first = [Argument(Var("_self"), selftype, None, ARG_POS)] + first = [Argument(Var("_self"), self_type, None, ARG_POS)] args = first + args types = [arg.type_annotation for arg in args] @@ -597,12 +604,12 @@ def add_method( arg_kinds = [arg.kind for arg in args] assert None not in types signature = CallableType(cast(List[Type], types), arg_kinds, items, ret, function_type) - signature.variables = [tvd] + signature.variables = [self_type] func = FuncDef(funcname, args, Block([])) func.info = info func.is_class = is_classmethod func.type = set_callable_name(signature, func) - func._fullname = info.fullname + "." 
+ funcname + func._fullname = fullname func.line = line if is_classmethod: v = Var(funcname, func.type) @@ -620,13 +627,13 @@ def add_method( add_method( "_replace", - ret=selftype, + ret=None, args=[Argument(var, var.type, EllipsisExpr(), ARG_NAMED_OPT) for var in vars], ) if self.options.python_version >= (3, 13): add_method( "__replace__", - ret=selftype, + ret=None, args=[Argument(var, var.type, EllipsisExpr(), ARG_NAMED_OPT) for var in vars], ) @@ -635,11 +642,11 @@ def make_init_arg(var: Var) -> Argument: kind = ARG_POS if default is None else ARG_OPT return Argument(var, var.type, default, kind) - add_method("__new__", ret=selftype, args=[make_init_arg(var) for var in vars], is_new=True) + add_method("__new__", ret=None, args=[make_init_arg(var) for var in vars], is_new=True) add_method("_asdict", args=[], ret=ordereddictype) add_method( "_make", - ret=selftype, + ret=None, is_classmethod=True, args=[Argument(Var("iterable", iterable_type), iterable_type, None, ARG_POS)], ) diff --git a/mypy/semanal_shared.py b/mypy/semanal_shared.py index b5ec2bb52a0d3..db19f074911f5 100644 --- a/mypy/semanal_shared.py +++ b/mypy/semanal_shared.py @@ -8,7 +8,6 @@ from mypy_extensions import trait -from mypy import join from mypy.errorcodes import LITERAL_REQ, ErrorCode from mypy.nodes import ( CallExpr, @@ -30,6 +29,7 @@ from mypy.plugin import SemanticAnalyzerPluginInterface from mypy.tvar_scope import TypeVarLikeScope from mypy.type_visitor import ANY_STRATEGY, BoolTypeQuery +from mypy.typeops import make_simplified_union from mypy.types import ( TPDICT_FB_NAMES, AnyType, @@ -58,7 +58,7 @@ # Priorities for ordering of patches within the "patch" phase of semantic analysis # (after the main pass): -# Fix fallbacks (does joins) +# Fix fallbacks (does subtype checks). PRIORITY_FALLBACKS: Final = 1 @@ -304,7 +304,7 @@ def calculate_tuple_fallback(typ: TupleType) -> None: raise NotImplementedError else: items.append(item) - fallback.args = (join.join_type_list(items),) + fallback.args = (make_simplified_union(items),) class _NamedTypeCallback(Protocol): @@ -314,7 +314,7 @@ def __call__(self, fully_qualified_name: str, args: list[Type] | None = None) -> def paramspec_args( name: str, fullname: str, - id: TypeVarId | int, + id: TypeVarId, *, named_type_func: _NamedTypeCallback, line: int = -1, @@ -337,7 +337,7 @@ def paramspec_args( def paramspec_kwargs( name: str, fullname: str, - id: TypeVarId | int, + id: TypeVarId, *, named_type_func: _NamedTypeCallback, line: int = -1, diff --git a/mypy/semanal_typeargs.py b/mypy/semanal_typeargs.py index 15ea15d612c08..dbf5136afa1b7 100644 --- a/mypy/semanal_typeargs.py +++ b/mypy/semanal_typeargs.py @@ -12,9 +12,10 @@ from mypy import errorcodes as codes, message_registry from mypy.errorcodes import ErrorCode from mypy.errors import Errors +from mypy.message_registry import INVALID_PARAM_SPEC_LOCATION, INVALID_PARAM_SPEC_LOCATION_NOTE from mypy.messages import format_type from mypy.mixedtraverser import MixedTraverserVisitor -from mypy.nodes import ARG_STAR, Block, ClassDef, Context, FakeInfo, FuncItem, MypyFile +from mypy.nodes import Block, ClassDef, Context, FakeInfo, FuncItem, MypyFile from mypy.options import Options from mypy.scope import Scope from mypy.subtypes import is_same_type, is_subtype @@ -103,15 +104,7 @@ def visit_tuple_type(self, t: TupleType) -> None: def visit_callable_type(self, t: CallableType) -> None: super().visit_callable_type(t) - # Normalize trivial unpack in var args as *args: *tuple[X, ...] 
-> *args: X - if t.is_var_arg: - star_index = t.arg_kinds.index(ARG_STAR) - star_type = t.arg_types[star_index] - if isinstance(star_type, UnpackType): - p_type = get_proper_type(star_type.type) - if isinstance(p_type, Instance): - assert p_type.type.fullname == "builtins.tuple" - t.arg_types[star_index] = p_type.args[0] + t.normalize_trivial_unpack() def visit_instance(self, t: Instance) -> None: super().visit_instance(t) @@ -146,13 +139,25 @@ def validate_args( for (i, arg), tvar in zip(enumerate(args), type_vars): if isinstance(tvar, TypeVarType): if isinstance(arg, ParamSpecType): - # TODO: Better message is_error = True - self.fail(f'Invalid location for ParamSpec "{arg.name}"', ctx) + self.fail( + INVALID_PARAM_SPEC_LOCATION.format(format_type(arg, self.options)), + ctx, + code=codes.VALID_TYPE, + ) self.note( - "You can use ParamSpec as the first argument to Callable, e.g., " - "'Callable[{}, int]'".format(arg.name), + INVALID_PARAM_SPEC_LOCATION_NOTE.format(arg.name), + ctx, + code=codes.VALID_TYPE, + ) + continue + if isinstance(arg, Parameters): + is_error = True + self.fail( + f"Cannot use {format_type(arg, self.options)} for regular type variable," + " only for ParamSpec", ctx, + code=codes.VALID_TYPE, ) continue if tvar.values: @@ -204,6 +209,7 @@ def validate_args( "Can only replace ParamSpec with a parameter types list or" f" another ParamSpec, got {format_type(arg, self.options)}", ctx, + code=codes.VALID_TYPE, ) return is_error diff --git a/mypy/solve.py b/mypy/solve.py index 9770364bf8920..bb87b6576ada2 100644 --- a/mypy/solve.py +++ b/mypy/solve.py @@ -514,7 +514,8 @@ def skip_reverse_union_constraints(cs: list[Constraint]) -> list[Constraint]: is a linear constraint. This is however not true in presence of union types, for example T :> Union[S, int] vs S <: T. Trying to solve such constraints would be detected ambiguous as (T, S) form a non-linear SCC. However, simply removing the linear part results in a valid - solution T = Union[S, int], S = . + solution T = Union[S, int], S = . A similar scenario is when we get T <: Union[T, int], + such constraints carry no information, and will equally confuse linearity check. TODO: a cleaner solution may be to avoid inferring such constraints in first place, but this would require passing around a flag through all infer_constraints() calls. @@ -525,7 +526,13 @@ def skip_reverse_union_constraints(cs: list[Constraint]) -> list[Constraint]: if isinstance(p_target, UnionType): for item in p_target.items: if isinstance(item, TypeVarType): + if item == c.origin_type_var and c.op == SUBTYPE_OF: + reverse_union_cs.add(c) + continue + # These two forms are semantically identical, but are different from + # the point of view of Constraint.__eq__(). 
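The docstring amendment above can be captured by a tiny standalone check: constraints of the form ``T <: Union[..., T, ...]`` carry no information and are filtered out before the linearity check. The ``Constraint`` tuple below is a simplified stand-in for illustration only, not mypy's real class:

.. code-block:: python

    from typing import NamedTuple

    SUBTYPE_OF, SUPERTYPE_OF = "<:", ":>"

    class Constraint(NamedTuple):
        type_var: str                   # e.g. "T"
        op: str                         # SUBTYPE_OF or SUPERTYPE_OF
        union_target: tuple[str, ...]   # members of the union on the other side

    def is_uninformative(c: Constraint) -> bool:
        # T <: Union[T, int] constrains nothing: T trivially satisfies it.
        return c.op == SUBTYPE_OF and c.type_var in c.union_target

    assert is_uninformative(Constraint("T", SUBTYPE_OF, ("T", "int")))
    assert not is_uninformative(Constraint("T", SUBTYPE_OF, ("S", "int")))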
reverse_union_cs.add(Constraint(item, neg_op(c.op), c.origin_type_var)) + reverse_union_cs.add(Constraint(c.origin_type_var, c.op, item)) return [c for c in cs if c not in reverse_union_cs] diff --git a/mypy/stubgen.py b/mypy/stubgen.py index 22028694ad6b2..8478bd2135e4c 100755 --- a/mypy/stubgen.py +++ b/mypy/stubgen.py @@ -314,6 +314,8 @@ def visit_index_expr(self, node: IndexExpr) -> str: return " | ".join([item.accept(self) for item in node.index.items]) return node.index.accept(self) if base_fullname == "typing.Optional": + if isinstance(node.index, TupleExpr): + return self.stubgen.add_name("_typeshed.Incomplete") return f"{node.index.accept(self)} | None" base = node.base.accept(self) index = node.index.accept(self) @@ -1060,6 +1062,10 @@ def is_alias_expression(self, expr: Expression, top_level: bool = True) -> bool: else: return False return all(self.is_alias_expression(i, top_level=False) for i in indices) + elif isinstance(expr, OpExpr) and expr.op == "|": + return self.is_alias_expression( + expr.left, top_level=False + ) and self.is_alias_expression(expr.right, top_level=False) else: return False diff --git a/mypy/stubgenc.py b/mypy/stubgenc.py index 7e3ef49c6e9a5..bacb68f6d1c73 100755 --- a/mypy/stubgenc.py +++ b/mypy/stubgenc.py @@ -178,7 +178,7 @@ def generate_stub_for_c_module( gen.generate_module() output = gen.output() - with open(target, "w") as file: + with open(target, "w", encoding="utf-8") as file: file.write(output) @@ -733,7 +733,7 @@ def generate_property_stub( def get_type_fullname(self, typ: type) -> str: """Given a type, return a string representation""" - if typ is Any: + if typ is Any: # type: ignore[comparison-overlap] return "Any" typename = getattr(typ, "__qualname__", typ.__name__) module_name = self.get_obj_module(typ) diff --git a/mypy/stubutil.py b/mypy/stubutil.py index 8e41d68625319..2f2db0dbbe535 100644 --- a/mypy/stubutil.py +++ b/mypy/stubutil.py @@ -257,7 +257,9 @@ def visit_unbound_type(self, t: UnboundType) -> str: if fullname == "typing.Union": return " | ".join([item.accept(self) for item in t.args]) if fullname == "typing.Optional": - return f"{t.args[0].accept(self)} | None" + if len(t.args) == 1: + return f"{t.args[0].accept(self)} | None" + return self.stubgen.add_name("_typeshed.Incomplete") if fullname in TYPING_BUILTIN_REPLACEMENTS: s = self.stubgen.add_name(TYPING_BUILTIN_REPLACEMENTS[fullname], require=True) if self.known_modules is not None and "." in s: diff --git a/mypy/subtypes.py b/mypy/subtypes.py index a5523fbe0d451..649cbae4c8318 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -8,7 +8,12 @@ import mypy.constraints import mypy.typeops from mypy.erasetype import erase_type -from mypy.expandtype import expand_self_type, expand_type, expand_type_by_instance +from mypy.expandtype import ( + expand_self_type, + expand_type, + expand_type_by_instance, + freshen_function_type_vars, +) from mypy.maptype import map_instance_to_supertype # Circular import; done in the function instead. 
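The ``typing.Optional`` handling changed in the stubgen/stubutil hunks above boils down to one rewriting rule; here is a simplified sketch of that rule (``_typeshed.Incomplete`` is the placeholder the real code emits, the helper name is invented):

.. code-block:: python

    def rewrite_optional(args: list[str]) -> str:
        # Optional[X] -> "X | None"; a malformed Optional[X, Y] degrades to the
        # Incomplete placeholder instead of emitting invalid syntax.
        if len(args) == 1:
            return f"{args[0]} | None"
        return "_typeshed.Incomplete"

    assert rewrite_optional(["int"]) == "int | None"
    assert rewrite_optional(["int", "str"]) == "_typeshed.Incomplete"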
@@ -87,8 +92,8 @@ def __init__( ignore_pos_arg_names: bool = False, ignore_declared_variance: bool = False, # Supported for both proper and non-proper + always_covariant: bool = False, ignore_promotions: bool = False, - ignore_uninhabited: bool = False, # Proper subtype flags erase_instances: bool = False, keep_erased_types: bool = False, @@ -97,8 +102,8 @@ def __init__( self.ignore_type_params = ignore_type_params self.ignore_pos_arg_names = ignore_pos_arg_names self.ignore_declared_variance = ignore_declared_variance + self.always_covariant = always_covariant self.ignore_promotions = ignore_promotions - self.ignore_uninhabited = ignore_uninhabited self.erase_instances = erase_instances self.keep_erased_types = keep_erased_types self.options = options @@ -120,8 +125,8 @@ def is_subtype( ignore_type_params: bool = False, ignore_pos_arg_names: bool = False, ignore_declared_variance: bool = False, + always_covariant: bool = False, ignore_promotions: bool = False, - ignore_uninhabited: bool = False, options: Options | None = None, ) -> bool: """Is 'left' subtype of 'right'? @@ -140,8 +145,8 @@ def is_subtype( ignore_type_params=ignore_type_params, ignore_pos_arg_names=ignore_pos_arg_names, ignore_declared_variance=ignore_declared_variance, + always_covariant=always_covariant, ignore_promotions=ignore_promotions, - ignore_uninhabited=ignore_uninhabited, options=options, ) else: @@ -150,8 +155,8 @@ def is_subtype( ignore_type_params, ignore_pos_arg_names, ignore_declared_variance, + always_covariant, ignore_promotions, - ignore_uninhabited, options, } ), "Don't pass both context and individual flags" @@ -186,7 +191,6 @@ def is_proper_subtype( *, subtype_context: SubtypeContext | None = None, ignore_promotions: bool = False, - ignore_uninhabited: bool = False, erase_instances: bool = False, keep_erased_types: bool = False, ) -> bool: @@ -202,19 +206,12 @@ def is_proper_subtype( if subtype_context is None: subtype_context = SubtypeContext( ignore_promotions=ignore_promotions, - ignore_uninhabited=ignore_uninhabited, erase_instances=erase_instances, keep_erased_types=keep_erased_types, ) else: assert not any( - { - ignore_promotions, - ignore_uninhabited, - erase_instances, - keep_erased_types, - ignore_uninhabited, - } + {ignore_promotions, erase_instances, keep_erased_types} ), "Don't pass both context and individual flags" if type_state.is_assumed_proper_subtype(left, right): return True @@ -404,6 +401,7 @@ def build_subtype_kind(subtype_context: SubtypeContext, proper_subtype: bool) -> subtype_context.ignore_type_params, subtype_context.ignore_pos_arg_names, subtype_context.ignore_declared_variance, + subtype_context.always_covariant, subtype_context.ignore_promotions, subtype_context.erase_instances, subtype_context.keep_erased_types, @@ -442,11 +440,7 @@ def visit_none_type(self, left: NoneType) -> bool: return True def visit_uninhabited_type(self, left: UninhabitedType) -> bool: - # We ignore this for unsafe overload checks, so that and empty list and - # a list of int will be considered non-overlapping. - if isinstance(self.right, UninhabitedType): - return True - return not self.subtype_context.ignore_uninhabited + return True def visit_erased_type(self, left: ErasedType) -> bool: # This may be encountered during type inference. 
The result probably doesn't @@ -585,12 +579,15 @@ def visit_instance(self, left: Instance) -> bool: if tvar.variance == VARIANCE_NOT_READY and not tried_infer: infer_class_variances(right.type) tried_infer = True + if ( + self.subtype_context.always_covariant + and tvar.variance == INVARIANT + ): + variance = COVARIANT + else: + variance = tvar.variance if not check_type_parameter( - lefta, - righta, - tvar.variance, - self.proper_subtype, - self.subtype_context, + lefta, righta, variance, self.proper_subtype, self.subtype_context ): nominal = False else: @@ -682,6 +679,8 @@ def visit_parameters(self, left: Parameters) -> bool: is_proper_subtype=False, ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, ) + elif isinstance(self.right, Instance): + return self.right.type.fullname == "builtins.object" else: return False @@ -794,15 +793,18 @@ def visit_tuple_type(self, left: TupleType) -> bool: return False if any(not self._is_subtype(l, r) for l, r in zip(left.items, right.items)): return False - rfallback = mypy.typeops.tuple_fallback(right) - if is_named_instance(rfallback, "builtins.tuple"): + if is_named_instance(right.partial_fallback, "builtins.tuple"): # No need to verify fallback. This is useful since the calculated fallback # may be inconsistent due to how we calculate joins between unions vs. # non-unions. For example, join(int, str) == object, whereas # join(Union[int, C], Union[str, C]) == Union[int, str, C]. return True - lfallback = mypy.typeops.tuple_fallback(left) - return self._is_subtype(lfallback, rfallback) + if is_named_instance(left.partial_fallback, "builtins.tuple"): + # Again, no need to verify. At this point we know the right fallback + # is a subclass of tuple, so if left is plain tuple, it cannot be a subtype. + return False + # At this point we know both fallbacks are non-tuple. + return self._is_subtype(left.partial_fallback, right.partial_fallback) else: return False @@ -944,7 +946,7 @@ def visit_overloaded(self, left: Overloaded) -> bool: # When it is the same overload, then the types are equal. return True - # Ensure each overload in the right side (the supertype) is accounted for. + # Ensure each overload on the right side (the supertype) is accounted for. previous_match_left_index = -1 matched_overloads = set() @@ -1409,7 +1411,6 @@ def is_callable_compatible( check_args_covariantly: bool = False, allow_partial_overlap: bool = False, strict_concatenate: bool = False, - no_unify_none: bool = False, ) -> bool: """Is the left compatible with the right, using the provided compatibility check? @@ -1430,7 +1431,7 @@ def is_callable_compatible( configurable. For example, when checking the validity of overloads, it's useful to see if - the first overload alternative has more precise arguments then the second. + the first overload alternative has more precise arguments than the second. We would want to check the arguments covariantly in that case. Note! The following two function calls are NOT equivalent: @@ -1526,26 +1527,11 @@ def g(x: int) -> int: ... # (below) treats type variables on the two sides as independent. if left.variables: # Apply generic type variables away in left via type inference. 
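The new ``always_covariant`` flag used in ``visit_instance`` above amounts to treating invariant type parameters as covariant for the duration of the check; a minimal sketch of that rule (the constants and helper are illustrative, not mypy's API):

.. code-block:: python

    INVARIANT, COVARIANT, CONTRAVARIANT = range(3)

    def effective_variance(declared: int, always_covariant: bool) -> int:
        # Only invariant parameters are promoted; declared co-/contravariance
        # is respected as-is.
        if always_covariant and declared == INVARIANT:
            return COVARIANT
        return declared

    assert effective_variance(INVARIANT, True) == COVARIANT
    assert effective_variance(CONTRAVARIANT, True) == CONTRAVARIANT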
- unified = unify_generic_callable( - left, right, ignore_return=ignore_return, no_unify_none=no_unify_none - ) + unified = unify_generic_callable(left, right, ignore_return=ignore_return) if unified is None: return False left = unified - # If we allow partial overlaps, we don't need to leave R generic: - # if we can find even just a single typevar assignment which - # would make these callables compatible, we should return True. - - # So, we repeat the above checks in the opposite direction. This also - # lets us preserve the 'symmetry' property of allow_partial_overlap. - if allow_partial_overlap and right.variables: - unified = unify_generic_callable( - right, left, ignore_return=ignore_return, no_unify_none=no_unify_none - ) - if unified is not None: - right = unified - # Check return types. if not ignore_return and not is_compat_return(left.ret_type, right.ret_type): return False @@ -1792,7 +1778,9 @@ def are_args_compatible( # If both arguments are required allow_partial_overlap has no effect. allow_partial_overlap = False - def is_different(left_item: object | None, right_item: object | None) -> bool: + def is_different( + left_item: object | None, right_item: object | None, allow_overlap: bool + ) -> bool: """Checks if the left and right items are different. If the right item is unspecified (e.g. if the right callable doesn't care @@ -1802,19 +1790,21 @@ def is_different(left_item: object | None, right_item: object | None) -> bool: if the left callable also doesn't care.""" if right_item is None: return False - if allow_partial_overlap and left_item is None: + if allow_overlap and left_item is None: return False return left_item != right_item # If right has a specific name it wants this argument to be, left must # have the same. - if is_different(left.name, right.name): + if is_different(left.name, right.name, allow_partial_overlap): # But pay attention to whether we're ignoring positional arg names if not ignore_pos_arg_names or right.pos is None: return False - # If right is at a specific position, left must have the same: - if is_different(left.pos, right.pos) and not allow_imprecise_kinds: + # If right is at a specific position, left must have the same. + # TODO: partial overlap logic is flawed for positions. + # We disable it to avoid false positives at a cost of few false negatives. + if is_different(left.pos, right.pos, allow_overlap=False) and not allow_imprecise_kinds: return False # If right's argument is optional, left's must also be @@ -1844,8 +1834,6 @@ def unify_generic_callable( target: NormalizedCallableType, ignore_return: bool, return_constraint_direction: int | None = None, - *, - no_unify_none: bool = False, ) -> NormalizedCallableType | None: """Try to unify a generic callable type with another callable type. @@ -1853,6 +1841,11 @@ def unify_generic_callable( """ import mypy.solve + if set(type.type_var_ids()) & {v.id for v in mypy.typeops.get_all_type_vars(target)}: + # Overload overlap check does nasty things like unifying in opposite direction. + # This can easily create type variable clashes, so we need to refresh. 
+ type = freshen_function_type_vars(type) + if return_constraint_direction is None: return_constraint_direction = mypy.constraints.SUBTYPE_OF @@ -1871,11 +1864,9 @@ def unify_generic_callable( type.ret_type, target.ret_type, return_constraint_direction ) constraints.extend(c) - if no_unify_none: - constraints = [ - c for c in constraints if not isinstance(get_proper_type(c.target), NoneType) - ] - inferred_vars, _ = mypy.solve.solve_constraints(type.variables, constraints) + inferred_vars, _ = mypy.solve.solve_constraints( + type.variables, constraints, allow_polymorphic=True + ) if None in inferred_vars: return None non_none_inferred_vars = cast(List[Type], inferred_vars) diff --git a/mypy/test/testdeps.py b/mypy/test/testdeps.py index f9a059672de8d..7c845eab8b572 100644 --- a/mypy/test/testdeps.py +++ b/mypy/test/testdeps.py @@ -3,8 +3,11 @@ from __future__ import annotations import os +import sys from collections import defaultdict +import pytest + from mypy import build from mypy.errors import CompileError from mypy.modulefinder import BuildSource @@ -28,6 +31,8 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: src = "\n".join(testcase.input) dump_all = "# __dump_all__" in src options = parse_options(src, testcase, incremental_step=1) + if options.python_version > sys.version_info: + pytest.skip("Test case requires a newer Python version") options.use_builtins_fixtures = True options.show_traceback = True options.cache_dir = os.devnull diff --git a/mypy/test/testdiff.py b/mypy/test/testdiff.py index 5e2e0bc2ca5a5..0559b33c33e2a 100644 --- a/mypy/test/testdiff.py +++ b/mypy/test/testdiff.py @@ -3,9 +3,11 @@ from __future__ import annotations import os +import sys + +import pytest from mypy import build -from mypy.defaults import PYTHON3_VERSION from mypy.errors import CompileError from mypy.modulefinder import BuildSource from mypy.nodes import MypyFile @@ -24,6 +26,8 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: files_dict = dict(testcase.files) second_src = files_dict["tmp/next.py"] options = parse_options(first_src, testcase, 1) + if options.python_version > sys.version_info: + pytest.skip("Test case requires a newer Python version") messages1, files1 = self.build(first_src, options) messages2, files2 = self.build(second_src, options) @@ -53,7 +57,6 @@ def build(self, source: str, options: Options) -> tuple[list[str], dict[str, Myp options.use_builtins_fixtures = True options.show_traceback = True options.cache_dir = os.devnull - options.python_version = PYTHON3_VERSION options.allow_empty_bodies = True try: result = build.build( diff --git a/mypy/test/testfinegrained.py b/mypy/test/testfinegrained.py index f61a58c425fc6..800ba2dff0878 100644 --- a/mypy/test/testfinegrained.py +++ b/mypy/test/testfinegrained.py @@ -16,6 +16,7 @@ import os import re +import sys import unittest from typing import Any @@ -82,6 +83,9 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: f.write(main_src) options = self.get_options(main_src, testcase, build_cache=False) + if options.python_version > sys.version_info: + pytest.skip("Test case requires a newer Python version") + build_options = self.get_options(main_src, testcase, build_cache=True) server = Server(options, DEFAULT_STATUS_FILE) diff --git a/mypy/test/testtypes.py b/mypy/test/testtypes.py index b3f84905c47e4..0218d33cc1244 100644 --- a/mypy/test/testtypes.py +++ b/mypy/test/testtypes.py @@ -144,7 +144,11 @@ def test_tuple_type_upper(self) -> None: def test_type_variable_binding(self) -> None: assert_equal( - 
str(TypeVarType("X", "X", 1, [], self.fx.o, AnyType(TypeOfAny.from_omitted_generics))), + str( + TypeVarType( + "X", "X", TypeVarId(1), [], self.fx.o, AnyType(TypeOfAny.from_omitted_generics) + ) + ), "X`1", ) assert_equal( @@ -152,7 +156,7 @@ def test_type_variable_binding(self) -> None: TypeVarType( "X", "X", - 1, + TypeVarId(1), [self.x, self.y], self.fx.o, AnyType(TypeOfAny.from_omitted_generics), @@ -170,14 +174,25 @@ def test_generic_function_type(self) -> None: self.function, name=None, variables=[ - TypeVarType("X", "X", -1, [], self.fx.o, AnyType(TypeOfAny.from_omitted_generics)) + TypeVarType( + "X", + "X", + TypeVarId(-1), + [], + self.fx.o, + AnyType(TypeOfAny.from_omitted_generics), + ) ], ) assert_equal(str(c), "def [X] (X?, Y?) -> Y?") v = [ - TypeVarType("Y", "Y", -1, [], self.fx.o, AnyType(TypeOfAny.from_omitted_generics)), - TypeVarType("X", "X", -2, [], self.fx.o, AnyType(TypeOfAny.from_omitted_generics)), + TypeVarType( + "Y", "Y", TypeVarId(-1), [], self.fx.o, AnyType(TypeOfAny.from_omitted_generics) + ), + TypeVarType( + "X", "X", TypeVarId(-2), [], self.fx.o, AnyType(TypeOfAny.from_omitted_generics) + ), ] c2 = CallableType([], [], [], NoneType(), self.function, name=None, variables=v) assert_equal(str(c2), "def [Y, X] ()") @@ -205,7 +220,9 @@ def test_type_alias_expand_all(self) -> None: def test_recursive_nested_in_non_recursive(self) -> None: A, _ = self.fx.def_alias_1(self.fx.a) - T = TypeVarType("T", "T", -1, [], self.fx.o, AnyType(TypeOfAny.from_omitted_generics)) + T = TypeVarType( + "T", "T", TypeVarId(-1), [], self.fx.o, AnyType(TypeOfAny.from_omitted_generics) + ) NA = self.fx.non_rec_alias(Instance(self.fx.gi, [T]), [T], [A]) assert not NA.is_recursive assert has_recursive_types(NA) @@ -657,7 +674,9 @@ def callable(self, vars: list[str], *a: Type) -> CallableType: n = -1 for v in vars: tv.append( - TypeVarType(v, v, n, [], self.fx.o, AnyType(TypeOfAny.from_omitted_generics)) + TypeVarType( + v, v, TypeVarId(n), [], self.fx.o, AnyType(TypeOfAny.from_omitted_generics) + ) ) n -= 1 return CallableType( diff --git a/mypy/test/typefixture.py b/mypy/test/typefixture.py index b7bde16e6be2a..5a813f70117c6 100644 --- a/mypy/test/typefixture.py +++ b/mypy/test/typefixture.py @@ -30,6 +30,7 @@ TypeAliasType, TypeOfAny, TypeType, + TypeVarId, TypeVarLikeType, TypeVarTupleType, TypeVarType, @@ -57,7 +58,7 @@ def make_type_var( return TypeVarType( name, name, - id, + TypeVarId(id), values, upper_bound, AnyType(TypeOfAny.from_omitted_generics), @@ -227,7 +228,7 @@ def make_type_var_tuple(name: str, id: int, upper_bound: Type) -> TypeVarTupleTy return TypeVarTupleType( name, name, - id, + TypeVarId(id), upper_bound, self.std_tuple, AnyType(TypeOfAny.from_omitted_generics), @@ -325,7 +326,7 @@ def make_type_info( TypeVarTupleType( n, n, - id, + TypeVarId(id), self.std_tuple.copy_modified(args=[self.o]), self.std_tuple.copy_modified(args=[self.o]), AnyType(TypeOfAny.from_omitted_generics), @@ -340,7 +341,7 @@ def make_type_info( TypeVarType( n, n, - id, + TypeVarId(id), [], self.o, AnyType(TypeOfAny.from_omitted_generics), diff --git a/mypy/traverser.py b/mypy/traverser.py index 225de27e70022..6f162c9ec5760 100644 --- a/mypy/traverser.py +++ b/mypy/traverser.py @@ -246,7 +246,6 @@ def visit_match_stmt(self, o: MatchStmt) -> None: def visit_type_alias_stmt(self, o: TypeAliasStmt) -> None: o.name.accept(self) - # TODO: params o.value.accept(self) def visit_member_expr(self, o: MemberExpr) -> None: diff --git a/mypy/tvar_scope.py b/mypy/tvar_scope.py index 
4dc663df0399e..fe97a8359287c 100644 --- a/mypy/tvar_scope.py +++ b/mypy/tvar_scope.py @@ -85,29 +85,27 @@ def allow_binding(self, fullname: str) -> bool: return False return True - def method_frame(self) -> TypeVarLikeScope: + def method_frame(self, namespace: str) -> TypeVarLikeScope: """A new scope frame for binding a method""" - return TypeVarLikeScope(self, False, None) + return TypeVarLikeScope(self, False, None, namespace=namespace) def class_frame(self, namespace: str) -> TypeVarLikeScope: """A new scope frame for binding a class. Prohibits *this* class's tvars""" return TypeVarLikeScope(self.get_function_scope(), True, self, namespace=namespace) - def new_unique_func_id(self) -> int: + def new_unique_func_id(self) -> TypeVarId: """Used by plugin-like code that needs to make synthetic generic functions.""" self.func_id -= 1 - return self.func_id + return TypeVarId(self.func_id) def bind_new(self, name: str, tvar_expr: TypeVarLikeExpr) -> TypeVarLikeType: if self.is_class_scope: self.class_id += 1 i = self.class_id - namespace = self.namespace else: self.func_id -= 1 i = self.func_id - # TODO: Consider also using namespaces for functions - namespace = "" + namespace = self.namespace tvar_expr.default.accept(TypeVarLikeNamespaceSetter(namespace)) if isinstance(tvar_expr, TypeVarExpr): @@ -124,9 +122,9 @@ def bind_new(self, name: str, tvar_expr: TypeVarLikeExpr) -> TypeVarLikeType: ) elif isinstance(tvar_expr, ParamSpecExpr): tvar_def = ParamSpecType( - name, - tvar_expr.fullname, - i, + name=name, + fullname=tvar_expr.fullname, + id=TypeVarId(i, namespace=namespace), flavor=ParamSpecFlavor.BARE, upper_bound=tvar_expr.upper_bound, default=tvar_expr.default, @@ -135,9 +133,9 @@ def bind_new(self, name: str, tvar_expr: TypeVarLikeExpr) -> TypeVarLikeType: ) elif isinstance(tvar_expr, TypeVarTupleExpr): tvar_def = TypeVarTupleType( - name, - tvar_expr.fullname, - i, + name=name, + fullname=tvar_expr.fullname, + id=TypeVarId(i, namespace=namespace), upper_bound=tvar_expr.upper_bound, tuple_fallback=tvar_expr.tuple_fallback, default=tvar_expr.default, diff --git a/mypy/type_visitor.py b/mypy/type_visitor.py index a6ae77832cebc..d0876629fc082 100644 --- a/mypy/type_visitor.py +++ b/mypy/type_visitor.py @@ -266,7 +266,12 @@ def visit_literal_type(self, t: LiteralType) -> Type: return LiteralType(value=t.value, fallback=fallback, line=t.line, column=t.column) def visit_union_type(self, t: UnionType) -> Type: - return UnionType(self.translate_types(t.items), t.line, t.column) + return UnionType( + self.translate_types(t.items), + t.line, + t.column, + uses_pep604_syntax=t.uses_pep604_syntax, + ) def translate_types(self, types: Iterable[Type]) -> list[Type]: return [t.accept(self) for t in types] diff --git a/mypy/typeanal.py b/mypy/typeanal.py index bf53204ffce96..6651af7dad4f0 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -10,7 +10,14 @@ from mypy import errorcodes as codes, message_registry, nodes from mypy.errorcodes import ErrorCode from mypy.expandtype import expand_type -from mypy.messages import MessageBuilder, format_type_bare, quote_type_string, wrong_type_arg_count +from mypy.message_registry import INVALID_PARAM_SPEC_LOCATION, INVALID_PARAM_SPEC_LOCATION_NOTE +from mypy.messages import ( + MessageBuilder, + format_type, + format_type_bare, + quote_type_string, + wrong_type_arg_count, +) from mypy.nodes import ( ARG_NAMED, ARG_NAMED_OPT, @@ -93,7 +100,6 @@ callable_with_ellipsis, find_unpack_in_list, flatten_nested_tuples, - flatten_nested_unions, get_proper_type, 
has_type_vars, ) @@ -1027,10 +1033,13 @@ def visit_unpack_type(self, t: UnpackType) -> Type: def visit_parameters(self, t: Parameters) -> Type: raise NotImplementedError("ParamSpec literals cannot have unbound TypeVars") - def visit_callable_type(self, t: CallableType, nested: bool = True) -> Type: + def visit_callable_type( + self, t: CallableType, nested: bool = True, namespace: str = "" + ) -> Type: # Every Callable can bind its own type variables, if they're not in the outer scope - with self.tvar_scope_frame(): - unpacked_kwargs = False + # TODO: attach namespace for nested free type variables (these appear in return type only). + with self.tvar_scope_frame(namespace=namespace): + unpacked_kwargs = t.unpack_kwargs if self.defining_alias: variables = t.variables else: @@ -1262,7 +1271,7 @@ def visit_union_type(self, t: UnionType) -> Type: and not self.options.python_version >= (3, 10) ): self.fail("X | Y syntax for unions requires Python 3.10", t, code=codes.SYNTAX) - return UnionType(self.anal_array(t.items), t.line) + return UnionType(self.anal_array(t.items), t.line, uses_pep604_syntax=t.uses_pep604_syntax) def visit_partial_type(self, t: PartialType) -> Type: assert False, "Internal error: Unexpected partial type" @@ -1432,7 +1441,7 @@ def analyze_callable_type(self, t: UnboundType) -> Type: ) else: # Callable[P, RET] (where P is ParamSpec) - with self.tvar_scope_frame(): + with self.tvar_scope_frame(namespace=""): # Temporarily bind ParamSpecs to allow code like this: # my_fun: Callable[Q, Foo[Q]] # We usually do this later in visit_callable_type(), but the analysis @@ -1648,9 +1657,9 @@ def note(self, msg: str, ctx: Context, *, code: ErrorCode | None = None) -> None self.note_func(msg, ctx, code=code) @contextmanager - def tvar_scope_frame(self) -> Iterator[None]: + def tvar_scope_frame(self, namespace: str) -> Iterator[None]: old_scope = self.tvar_scope - self.tvar_scope = self.tvar_scope.method_frame() + self.tvar_scope = self.tvar_scope.method_frame(namespace) yield self.tvar_scope = old_scope @@ -1780,12 +1789,14 @@ def anal_type( analyzed = AnyType(TypeOfAny.from_error) else: self.fail( - f'Invalid location for ParamSpec "{analyzed.name}"', t, code=codes.VALID_TYPE + INVALID_PARAM_SPEC_LOCATION.format(format_type(analyzed, self.options)), + t, + code=codes.VALID_TYPE, ) self.note( - "You can use ParamSpec as the first argument to Callable, e.g., " - "'Callable[{}, int]'".format(analyzed.name), + INVALID_PARAM_SPEC_LOCATION_NOTE.format(analyzed.name), t, + code=codes.VALID_TYPE, ) analyzed = AnyType(TypeOfAny.from_error) return analyzed @@ -1795,7 +1806,7 @@ def anal_var_def(self, var_def: TypeVarLikeType) -> TypeVarLikeType: return TypeVarType( name=var_def.name, fullname=var_def.fullname, - id=var_def.id.raw_id, + id=var_def.id, values=self.anal_array(var_def.values), upper_bound=var_def.upper_bound.accept(self), default=var_def.default.accept(self), @@ -2334,16 +2345,11 @@ def make_optional_type(t: Type) -> Type: is called during semantic analysis and simplification only works during type checking. """ - p_t = get_proper_type(t) - if isinstance(p_t, NoneType): + if isinstance(t, ProperType) and isinstance(t, NoneType): return t - elif isinstance(p_t, UnionType): + elif isinstance(t, ProperType) and isinstance(t, UnionType): # Eagerly expanding aliases is not safe during semantic analysis. 
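For reference, the simplified ``make_optional_type`` above behaves as sketched below, with union members modelled as plain strings (this is an illustration of the intended result, not the real implementation):

.. code-block:: python

    def make_optional(union_items: list[str]) -> list[str]:
        # A bare None stays as-is; otherwise None is appended exactly once.
        if union_items == ["None"]:
            return union_items
        return [item for item in union_items if item != "None"] + ["None"]

    assert make_optional(["int", "str"]) == ["int", "str", "None"]
    assert make_optional(["int", "None"]) == ["int", "None"]
    assert make_optional(["None"]) == ["None"]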
- items = [ - item - for item in flatten_nested_unions(p_t.items, handle_type_alias_type=False) - if not isinstance(get_proper_type(item), NoneType) - ] + items = [item for item in t.items if not isinstance(get_proper_type(item), NoneType)] return UnionType(items + [NoneType()], t.line, t.column) else: return UnionType([t, NoneType()], t.line, t.column) @@ -2370,6 +2376,12 @@ def validate_instance(t: Instance, fail: MsgCallback, empty_tuple_index: bool) - if not t.args: if not (empty_tuple_index and len(t.type.type_vars) == 1): # The Any arguments should be set by the caller. + if empty_tuple_index and min_tv_count: + fail( + f"At least {min_tv_count} type argument(s) expected, none given", + t, + code=codes.TYPE_ARG, + ) return False elif not correct: fail( diff --git a/mypy/typeops.py b/mypy/typeops.py index a59bd3739562a..4fe187f811ca3 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -95,8 +95,6 @@ def is_recursive_pair(s: Type, t: Type) -> bool: def tuple_fallback(typ: TupleType) -> Instance: """Return fallback type for a tuple.""" - from mypy.join import join_type_list - info = typ.partial_fallback.type if info.fullname != "builtins.tuple": return typ.partial_fallback @@ -115,8 +113,9 @@ def tuple_fallback(typ: TupleType) -> Instance: raise NotImplementedError else: items.append(item) - # TODO: we should really use a union here, tuple types are special. - return Instance(info, [join_type_list(items)], extra_attrs=typ.partial_fallback.extra_attrs) + return Instance( + info, [make_simplified_union(items)], extra_attrs=typ.partial_fallback.extra_attrs + ) def get_self_type(func: CallableType, default_self: Instance | TupleType) -> Type | None: @@ -152,7 +151,14 @@ def type_object_type_from_function( # ... # # We need to map B's __init__ to the type (List[T]) -> None. - signature = bind_self(signature, original_type=default_self, is_classmethod=is_new) + signature = bind_self( + signature, + original_type=default_self, + is_classmethod=is_new, + # Explicit instance self annotations have special handling in class_callable(), + # we don't need to bind any type variables in them if they are generic. + ignore_instances=True, + ) signature = cast(FunctionLike, map_type_from_supertype(signature, info, def_info)) special_sig: str | None = None @@ -244,7 +250,9 @@ class C(D[E[T]], Generic[T]): ... return expand_type_by_instance(typ, inst_type) -def supported_self_type(typ: ProperType, allow_callable: bool = True) -> bool: +def supported_self_type( + typ: ProperType, allow_callable: bool = True, allow_instances: bool = True +) -> bool: """Is this a supported kind of explicit self-types? Currently, this means an X or Type[X], where X is an instance or @@ -257,14 +265,19 @@ def supported_self_type(typ: ProperType, allow_callable: bool = True) -> bool: # as well as callable self for callback protocols. return True return isinstance(typ, TypeVarType) or ( - isinstance(typ, Instance) and typ != fill_typevars(typ.type) + allow_instances and isinstance(typ, Instance) and typ != fill_typevars(typ.type) ) F = TypeVar("F", bound=FunctionLike) -def bind_self(method: F, original_type: Type | None = None, is_classmethod: bool = False) -> F: +def bind_self( + method: F, + original_type: Type | None = None, + is_classmethod: bool = False, + ignore_instances: bool = False, +) -> F: """Return a copy of `method`, with the type of its first parameter (usually self or cls) bound to original_type. 
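For orientation, this is the user-level situation ``bind_self`` models: once the implicit first argument is bound, the remaining signature is what attribute access on an instance sees. Plain Python 3.11+ (for ``typing.Self``), not mypy internals:

.. code-block:: python

    from typing import Self

    class Builder:
        def with_name(self, name: str) -> Self:
            self.name = name
            return self

    # After binding `self`, the method is effectively (name: str) -> Builder.
    built = Builder().with_name("x")
    assert isinstance(built, Builder)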
@@ -288,9 +301,10 @@ class B(A): pass """ if isinstance(method, Overloaded): - return cast( - F, Overloaded([bind_self(c, original_type, is_classmethod) for c in method.items]) - ) + items = [ + bind_self(c, original_type, is_classmethod, ignore_instances) for c in method.items + ] + return cast(F, Overloaded(items)) assert isinstance(method, CallableType) func = method if not func.arg_types: @@ -310,7 +324,9 @@ class B(A): pass # this special-casing looks not very principled, there is nothing meaningful we can infer # from such definition, since it is inherently indefinitely recursive. allow_callable = func.name is None or not func.name.startswith("__call__ of") - if func.variables and supported_self_type(self_param_type, allow_callable=allow_callable): + if func.variables and supported_self_type( + self_param_type, allow_callable=allow_callable, allow_instances=not ignore_instances + ): from mypy.infer import infer_type_arguments if original_type is None: diff --git a/mypy/types.py b/mypy/types.py index a138a337f2fea..f3ae337b05e33 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -501,7 +501,7 @@ class TypeVarId: # function type variables. # Metavariables are allocated unique ids starting from 1. - raw_id: int = 0 + raw_id: int # Level of the variable in type inference. Currently either 0 for # declared types, or 1 for type inference metavariables. @@ -510,9 +510,8 @@ class TypeVarId: # Class variable used for allocating fresh ids for metavariables. next_raw_id: ClassVar[int] = 1 - # Fullname of class (or potentially function in the future) which - # declares this type variable (not the fullname of the TypeVar - # definition!), or '' + # Fullname of class or function/method which declares this type + # variable (not the fullname of the TypeVar definition!), or '' namespace: str def __init__(self, raw_id: int, meta_level: int = 0, *, namespace: str = "") -> None: @@ -546,6 +545,10 @@ def __hash__(self) -> int: def is_meta_var(self) -> bool: return self.meta_level > 0 + def is_self(self) -> bool: + # This is a special value indicating typing.Self variable. 
+ return self.raw_id == 0 + class TypeVarLikeType(ProperType): __slots__ = ("name", "fullname", "id", "upper_bound", "default") @@ -560,7 +563,7 @@ def __init__( self, name: str, fullname: str, - id: TypeVarId | int, + id: TypeVarId, upper_bound: Type, default: Type, line: int = -1, @@ -569,8 +572,6 @@ def __init__( super().__init__(line, column) self.name = name self.fullname = fullname - if isinstance(id, int): - id = TypeVarId(id) self.id = id self.upper_bound = upper_bound self.default = default @@ -607,7 +608,7 @@ def __init__( self, name: str, fullname: str, - id: TypeVarId | int, + id: TypeVarId, values: list[Type], upper_bound: Type, default: Type, @@ -626,7 +627,7 @@ def copy_modified( values: Bogus[list[Type]] = _dummy, upper_bound: Bogus[Type] = _dummy, default: Bogus[Type] = _dummy, - id: Bogus[TypeVarId | int] = _dummy, + id: Bogus[TypeVarId] = _dummy, line: int = _dummy_int, column: int = _dummy_int, **kwargs: Any, @@ -722,7 +723,7 @@ def __init__( self, name: str, fullname: str, - id: TypeVarId | int, + id: TypeVarId, flavor: int, upper_bound: Type, default: Type, @@ -749,7 +750,7 @@ def with_flavor(self, flavor: int) -> ParamSpecType: def copy_modified( self, *, - id: Bogus[TypeVarId | int] = _dummy, + id: Bogus[TypeVarId] = _dummy, flavor: int = _dummy_int, prefix: Bogus[Parameters] = _dummy, default: Bogus[Type] = _dummy, @@ -794,6 +795,7 @@ def serialize(self) -> JsonDict: "name": self.name, "fullname": self.fullname, "id": self.id.raw_id, + "namespace": self.id.namespace, "flavor": self.flavor, "upper_bound": self.upper_bound.serialize(), "default": self.default.serialize(), @@ -806,7 +808,7 @@ def deserialize(cls, data: JsonDict) -> ParamSpecType: return ParamSpecType( data["name"], data["fullname"], - data["id"], + TypeVarId(data["id"], namespace=data["namespace"]), data["flavor"], deserialize_type(data["upper_bound"]), deserialize_type(data["default"]), @@ -826,7 +828,7 @@ def __init__( self, name: str, fullname: str, - id: TypeVarId | int, + id: TypeVarId, upper_bound: Type, tuple_fallback: Instance, default: Type, @@ -848,6 +850,7 @@ def serialize(self) -> JsonDict: "name": self.name, "fullname": self.fullname, "id": self.id.raw_id, + "namespace": self.id.namespace, "upper_bound": self.upper_bound.serialize(), "tuple_fallback": self.tuple_fallback.serialize(), "default": self.default.serialize(), @@ -860,7 +863,7 @@ def deserialize(cls, data: JsonDict) -> TypeVarTupleType: return TypeVarTupleType( data["name"], data["fullname"], - data["id"], + TypeVarId(data["id"], namespace=data["namespace"]), deserialize_type(data["upper_bound"]), Instance.deserialize(data["tuple_fallback"]), deserialize_type(data["default"]), @@ -881,7 +884,7 @@ def __eq__(self, other: object) -> bool: def copy_modified( self, *, - id: Bogus[TypeVarId | int] = _dummy, + id: Bogus[TypeVarId] = _dummy, upper_bound: Bogus[Type] = _dummy, default: Bogus[Type] = _dummy, min_len: Bogus[int] = _dummy, @@ -2090,6 +2093,17 @@ def param_spec(self) -> ParamSpecType | None: prefix = Parameters(self.arg_types[:-2], self.arg_kinds[:-2], self.arg_names[:-2]) return arg_type.copy_modified(flavor=ParamSpecFlavor.BARE, prefix=prefix) + def normalize_trivial_unpack(self) -> None: + # Normalize trivial unpack in var args as *args: *tuple[X, ...] -> *args: X in place. 
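The normalization described in the comment above is visible at the annotation level: the two declarations below accept exactly the same calls (Python 3.11+, with the ``Unpack`` spelling used for clarity):

.. code-block:: python

    from typing import Unpack

    def takes_unpacked(*args: Unpack[tuple[int, ...]]) -> None: ...
    def takes_plain(*args: int) -> None: ...

    takes_unpacked(1, 2, 3)
    takes_plain(1, 2, 3)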
+ if self.is_var_arg: + star_index = self.arg_kinds.index(ARG_STAR) + star_type = self.arg_types[star_index] + if isinstance(star_type, UnpackType): + p_type = get_proper_type(star_type.type) + if isinstance(p_type, Instance): + assert p_type.type.fullname == "builtins.tuple" + self.arg_types[star_index] = p_type.args[0] + def with_unpacked_kwargs(self) -> NormalizedCallableType: if not self.unpack_kwargs: return cast(NormalizedCallableType, self) @@ -2119,7 +2133,7 @@ def with_normalized_var_args(self) -> Self: if not isinstance(unpacked, TupleType): # Note that we don't normalize *args: *tuple[X, ...] -> *args: X, # this should be done once in semanal_typeargs.py for user-defined types, - # and we ourselves should never construct such type. + # and we ourselves rarely construct such type. return self unpack_index = find_unpack_in_list(unpacked.items) if unpack_index == 0 and len(unpacked.items) > 1: @@ -2831,6 +2845,7 @@ def __init__( items: Sequence[Type], line: int = -1, column: int = -1, + *, is_evaluated: bool = True, uses_pep604_syntax: bool = False, ) -> None: @@ -3104,8 +3119,7 @@ def get_proper_type(typ: Type | None) -> ProperType | None: @overload -def get_proper_types(types: list[Type] | tuple[Type, ...]) -> list[ProperType]: # type: ignore[overload-overlap] - ... +def get_proper_types(types: list[Type] | tuple[Type, ...]) -> list[ProperType]: ... @overload @@ -3508,6 +3522,11 @@ def visit_instance(self, typ: Instance) -> None: typ.column = self.column super().visit_instance(typ) + def visit_type_alias_type(self, typ: TypeAliasType) -> None: + typ.line = self.line + typ.column = self.column + super().visit_type_alias_type(typ) + class HasTypeVars(BoolTypeQuery): def __init__(self) -> None: diff --git a/mypy/typeshed/stdlib/VERSIONS b/mypy/typeshed/stdlib/VERSIONS index a8526aab9422c..7b9ce2864484d 100644 --- a/mypy/typeshed/stdlib/VERSIONS +++ b/mypy/typeshed/stdlib/VERSIONS @@ -65,9 +65,9 @@ array: 3.0- ast: 3.0- asynchat: 3.0-3.11 asyncio: 3.4- -asyncio.mixins: 3.10- asyncio.exceptions: 3.8- asyncio.format_helpers: 3.7- +asyncio.mixins: 3.10- asyncio.runners: 3.7- asyncio.staggered: 3.8- asyncio.taskgroups: 3.11- @@ -270,6 +270,7 @@ threading: 3.0- time: 3.0- timeit: 3.0- tkinter: 3.0- +tkinter.tix: 3.0-3.12 token: 3.0- tokenize: 3.0- tomllib: 3.11- diff --git a/mypy/typeshed/stdlib/_ast.pyi b/mypy/typeshed/stdlib/_ast.pyi index 51791b4099d5c..d14c6d39a162e 100644 --- a/mypy/typeshed/stdlib/_ast.pyi +++ b/mypy/typeshed/stdlib/_ast.pyi @@ -7,8 +7,11 @@ PyCF_ONLY_AST: Literal[1024] PyCF_TYPE_COMMENTS: Literal[4096] PyCF_ALLOW_TOP_LEVEL_AWAIT: Literal[8192] +if sys.version_info >= (3, 13): + PyCF_OPTIMIZED_AST: Literal[33792] + # Used for node end positions in constructor keyword arguments -_EndPositionT = typing_extensions.TypeVar("_EndPositionT", int, int | None, default=int | None) # noqa: Y023 +_EndPositionT = typing_extensions.TypeVar("_EndPositionT", int, int | None, default=int | None) # Alias used for fields that must always be valid identifiers # A string `x` counts as a valid identifier if both the following are True diff --git a/mypy/typeshed/stdlib/_curses.pyi b/mypy/typeshed/stdlib/_curses.pyi index 6f3fbd807fcc4..eb1d7b9bde9f3 100644 --- a/mypy/typeshed/stdlib/_curses.pyi +++ b/mypy/typeshed/stdlib/_curses.pyi @@ -63,8 +63,7 @@ A_COLOR: int A_DIM: int A_HORIZONTAL: int A_INVIS: int -if sys.platform != "darwin": - A_ITALIC: int +A_ITALIC: int A_LEFT: int A_LOW: int A_NORMAL: int diff --git a/mypy/typeshed/stdlib/_json.pyi b/mypy/typeshed/stdlib/_json.pyi index 
a6a62be184d84..069fb6eac4bf5 100644 --- a/mypy/typeshed/stdlib/_json.pyi +++ b/mypy/typeshed/stdlib/_json.pyi @@ -45,5 +45,5 @@ class make_scanner: def __init__(self, context: make_scanner) -> None: ... def __call__(self, string: str, index: int) -> tuple[Any, int]: ... -def encode_basestring_ascii(s: str) -> str: ... +def encode_basestring_ascii(s: str, /) -> str: ... def scanstring(string: str, end: int, strict: bool = ...) -> tuple[str, int]: ... diff --git a/mypy/typeshed/stdlib/_tkinter.pyi b/mypy/typeshed/stdlib/_tkinter.pyi index 3340df4241634..aea74c8be279e 100644 --- a/mypy/typeshed/stdlib/_tkinter.pyi +++ b/mypy/typeshed/stdlib/_tkinter.pyi @@ -1,5 +1,7 @@ import sys +from collections.abc import Callable from typing import Any, ClassVar, Literal, final +from typing_extensions import TypeAlias # _tkinter is meant to be only used internally by tkinter, but some tkinter # functions e.g. return _tkinter.Tcl_Obj objects. Tcl_Obj represents a Tcl @@ -30,6 +32,8 @@ class Tcl_Obj: class TclError(Exception): ... +_TkinterTraceFunc: TypeAlias = Callable[[tuple[str, ...]], object] + # This class allows running Tcl code. Tkinter uses it internally a lot, and # it's often handy to drop a piece of Tcl code into a tkinter program. Example: # @@ -86,6 +90,9 @@ class TkappType: def unsetvar(self, *args, **kwargs): ... def wantobjects(self, *args, **kwargs): ... def willdispatch(self): ... + if sys.version_info >= (3, 12): + def gettrace(self, /) -> _TkinterTraceFunc | None: ... + def settrace(self, func: _TkinterTraceFunc | None, /) -> None: ... # These should be kept in sync with tkinter.tix constants, except ALL_EVENTS which doesn't match TCL_ALL_EVENTS ALL_EVENTS: Literal[-3] diff --git a/mypy/typeshed/stdlib/_weakref.pyi b/mypy/typeshed/stdlib/_weakref.pyi index 61365645d768a..f142820c56c72 100644 --- a/mypy/typeshed/stdlib/_weakref.pyi +++ b/mypy/typeshed/stdlib/_weakref.pyi @@ -21,8 +21,9 @@ class ProxyType(Generic[_T]): # "weakproxy" def __getattr__(self, attr: str) -> Any: ... class ReferenceType(Generic[_T]): - __callback__: Callable[[ReferenceType[_T]], Any] - def __new__(cls, o: _T, callback: Callable[[ReferenceType[_T]], Any] | None = ..., /) -> Self: ... + __callback__: Callable[[Self], Any] + def __new__(cls, o: _T, callback: Callable[[Self], Any] | None = ..., /) -> Self: ... + def __init__(self, o: _T, callback: Callable[[Self], Any] | None = ..., /) -> None: ... def __call__(self) -> _T | None: ... def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... diff --git a/mypy/typeshed/stdlib/ast.pyi b/mypy/typeshed/stdlib/ast.pyi index 2525c3642a6fc..90ede461fe3ce 100644 --- a/mypy/typeshed/stdlib/ast.pyi +++ b/mypy/typeshed/stdlib/ast.pyi @@ -365,3 +365,6 @@ def walk(node: AST) -> Iterator[AST]: ... if sys.version_info >= (3, 9): def main() -> None: ... + +if sys.version_info >= (3, 14): + def compare(left: AST, right: AST, /, *, compare_attributes: bool = False) -> bool: ... diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi index 4a6c4bbcae459..28b0b11a8e5c9 100644 --- a/mypy/typeshed/stdlib/builtins.pyi +++ b/mypy/typeshed/stdlib/builtins.pyi @@ -445,7 +445,7 @@ class str(Sequence[str]): def expandtabs(self, tabsize: SupportsIndex = 8) -> str: ... # type: ignore[misc] def find(self, sub: str, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /) -> int: ... def format(self, *args: object, **kwargs: object) -> str: ... - def format_map(self, map: _FormatMapMapping) -> str: ... 
+ def format_map(self, mapping: _FormatMapMapping, /) -> str: ... def index(self, sub: str, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /) -> int: ... def isalnum(self) -> bool: ... def isalpha(self) -> bool: ... @@ -464,7 +464,10 @@ class str(Sequence[str]): def lower(self) -> str: ... # type: ignore[misc] def lstrip(self, chars: str | None = None, /) -> str: ... # type: ignore[misc] def partition(self, sep: str, /) -> tuple[str, str, str]: ... # type: ignore[misc] - def replace(self, old: str, new: str, count: SupportsIndex = -1, /) -> str: ... # type: ignore[misc] + if sys.version_info >= (3, 13): + def replace(self, old: str, new: str, /, count: SupportsIndex = -1) -> str: ... # type: ignore[misc] + else: + def replace(self, old: str, new: str, count: SupportsIndex = -1, /) -> str: ... # type: ignore[misc] if sys.version_info >= (3, 9): def removeprefix(self, prefix: str, /) -> str: ... # type: ignore[misc] def removesuffix(self, suffix: str, /) -> str: ... # type: ignore[misc] @@ -1126,6 +1129,9 @@ class property: fset: Callable[[Any, Any], None] | None fdel: Callable[[Any], None] | None __isabstractmethod__: bool + if sys.version_info >= (3, 13): + __name__: str + def __init__( self, fget: Callable[[Any], Any] | None = ..., @@ -1572,9 +1578,9 @@ def pow(base: float, exp: complex | _SupportsSomeKindOfPow, mod: None = None) -> @overload def pow(base: complex, exp: complex | _SupportsSomeKindOfPow, mod: None = None) -> complex: ... @overload -def pow(base: _SupportsPow2[_E, _T_co], exp: _E, mod: None = None) -> _T_co: ... +def pow(base: _SupportsPow2[_E, _T_co], exp: _E, mod: None = None) -> _T_co: ... # type: ignore[overload-overlap] @overload -def pow(base: _SupportsPow3NoneOnly[_E, _T_co], exp: _E, mod: None = None) -> _T_co: ... +def pow(base: _SupportsPow3NoneOnly[_E, _T_co], exp: _E, mod: None = None) -> _T_co: ... # type: ignore[overload-overlap] @overload def pow(base: _SupportsPow3[_E, _M, _T_co], exp: _E, mod: _M) -> _T_co: ... @overload @@ -1969,3 +1975,7 @@ if sys.version_info >= (3, 11): def split( self, condition: Callable[[_ExceptionT_co | Self], bool], / ) -> tuple[ExceptionGroup[_ExceptionT_co] | None, ExceptionGroup[_ExceptionT_co] | None]: ... + +if sys.version_info >= (3, 13): + class IncompleteInputError(SyntaxError): ... + class PythonFinalizationError(RuntimeError): ... diff --git a/mypy/typeshed/stdlib/configparser.pyi b/mypy/typeshed/stdlib/configparser.pyi index 07b57b17d56d7..f38bb1de674de 100644 --- a/mypy/typeshed/stdlib/configparser.pyi +++ b/mypy/typeshed/stdlib/configparser.pyi @@ -5,7 +5,31 @@ from re import Pattern from typing import Any, ClassVar, Literal, TypeVar, overload from typing_extensions import TypeAlias -if sys.version_info >= (3, 12): +if sys.version_info >= (3, 13): + __all__ = ( + "NoSectionError", + "DuplicateOptionError", + "DuplicateSectionError", + "NoOptionError", + "InterpolationError", + "InterpolationDepthError", + "InterpolationMissingOptionError", + "InterpolationSyntaxError", + "ParsingError", + "MissingSectionHeaderError", + "ConfigParser", + "RawConfigParser", + "Interpolation", + "BasicInterpolation", + "ExtendedInterpolation", + "SectionProxy", + "ConverterMapping", + "DEFAULTSECT", + "MAX_INTERPOLATION_DEPTH", + "UNNAMED_SECTION", + "MultilineContinuationError", + ) +elif sys.version_info >= (3, 12): __all__ = ( "NoSectionError", "DuplicateOptionError", @@ -71,8 +95,9 @@ class Interpolation: class BasicInterpolation(Interpolation): ... class ExtendedInterpolation(Interpolation): ... 
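The versioned ``str.replace`` stub above mirrors a real Python 3.13 change: ``count`` may now be passed by keyword. A quick runtime check of the behaviour the stub encodes (guarded so it also runs on older versions):

.. code-block:: python

    import sys

    text = "aaa"
    assert text.replace("a", "b", 1) == "baa"            # positional form, all versions
    if sys.version_info >= (3, 13):
        assert text.replace("a", "b", count=1) == "baa"  # keyword form, 3.13+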
-class LegacyInterpolation(Interpolation): - def before_get(self, parser: _Parser, section: str, option: str, value: str, vars: _Section) -> str: ... +if sys.version_info < (3, 13): + class LegacyInterpolation(Interpolation): + def before_get(self, parser: _Parser, section: str, option: str, value: str, vars: _Section) -> str: ... class RawConfigParser(_Parser): _SECT_TMPL: ClassVar[str] # undocumented @@ -86,54 +111,108 @@ class RawConfigParser(_Parser): BOOLEAN_STATES: ClassVar[Mapping[str, bool]] # undocumented default_section: str - @overload - def __init__( - self, - defaults: Mapping[str, str | None] | None = None, - dict_type: type[Mapping[str, str]] = ..., - *, - allow_no_value: Literal[True], - delimiters: Sequence[str] = ("=", ":"), - comment_prefixes: Sequence[str] = ("#", ";"), - inline_comment_prefixes: Sequence[str] | None = None, - strict: bool = True, - empty_lines_in_values: bool = True, - default_section: str = "DEFAULT", - interpolation: Interpolation | None = ..., - converters: _ConvertersMap = ..., - ) -> None: ... - @overload - def __init__( - self, - defaults: Mapping[str, str | None] | None, - dict_type: type[Mapping[str, str]], - allow_no_value: Literal[True], - *, - delimiters: Sequence[str] = ("=", ":"), - comment_prefixes: Sequence[str] = ("#", ";"), - inline_comment_prefixes: Sequence[str] | None = None, - strict: bool = True, - empty_lines_in_values: bool = True, - default_section: str = "DEFAULT", - interpolation: Interpolation | None = ..., - converters: _ConvertersMap = ..., - ) -> None: ... - @overload - def __init__( - self, - defaults: _Section | None = None, - dict_type: type[Mapping[str, str]] = ..., - allow_no_value: bool = False, - *, - delimiters: Sequence[str] = ("=", ":"), - comment_prefixes: Sequence[str] = ("#", ";"), - inline_comment_prefixes: Sequence[str] | None = None, - strict: bool = True, - empty_lines_in_values: bool = True, - default_section: str = "DEFAULT", - interpolation: Interpolation | None = ..., - converters: _ConvertersMap = ..., - ) -> None: ... + if sys.version_info >= (3, 13): + @overload + def __init__( + self, + defaults: Mapping[str, str | None] | None = None, + dict_type: type[Mapping[str, str]] = ..., + *, + allow_no_value: Literal[True], + delimiters: Sequence[str] = ("=", ":"), + comment_prefixes: Sequence[str] = ("#", ";"), + inline_comment_prefixes: Sequence[str] | None = None, + strict: bool = True, + empty_lines_in_values: bool = True, + default_section: str = "DEFAULT", + interpolation: Interpolation | None = ..., + converters: _ConvertersMap = ..., + allow_unnamed_section: bool = False, + ) -> None: ... + @overload + def __init__( + self, + defaults: Mapping[str, str | None] | None, + dict_type: type[Mapping[str, str]], + allow_no_value: Literal[True], + *, + delimiters: Sequence[str] = ("=", ":"), + comment_prefixes: Sequence[str] = ("#", ";"), + inline_comment_prefixes: Sequence[str] | None = None, + strict: bool = True, + empty_lines_in_values: bool = True, + default_section: str = "DEFAULT", + interpolation: Interpolation | None = ..., + converters: _ConvertersMap = ..., + allow_unnamed_section: bool = False, + ) -> None: ... 
+ @overload + def __init__( + self, + defaults: _Section | None = None, + dict_type: type[Mapping[str, str]] = ..., + allow_no_value: bool = False, + *, + delimiters: Sequence[str] = ("=", ":"), + comment_prefixes: Sequence[str] = ("#", ";"), + inline_comment_prefixes: Sequence[str] | None = None, + strict: bool = True, + empty_lines_in_values: bool = True, + default_section: str = "DEFAULT", + interpolation: Interpolation | None = ..., + converters: _ConvertersMap = ..., + allow_unnamed_section: bool = False, + ) -> None: ... + else: + @overload + def __init__( + self, + defaults: Mapping[str, str | None] | None = None, + dict_type: type[Mapping[str, str]] = ..., + *, + allow_no_value: Literal[True], + delimiters: Sequence[str] = ("=", ":"), + comment_prefixes: Sequence[str] = ("#", ";"), + inline_comment_prefixes: Sequence[str] | None = None, + strict: bool = True, + empty_lines_in_values: bool = True, + default_section: str = "DEFAULT", + interpolation: Interpolation | None = ..., + converters: _ConvertersMap = ..., + ) -> None: ... + @overload + def __init__( + self, + defaults: Mapping[str, str | None] | None, + dict_type: type[Mapping[str, str]], + allow_no_value: Literal[True], + *, + delimiters: Sequence[str] = ("=", ":"), + comment_prefixes: Sequence[str] = ("#", ";"), + inline_comment_prefixes: Sequence[str] | None = None, + strict: bool = True, + empty_lines_in_values: bool = True, + default_section: str = "DEFAULT", + interpolation: Interpolation | None = ..., + converters: _ConvertersMap = ..., + ) -> None: ... + @overload + def __init__( + self, + defaults: _Section | None = None, + dict_type: type[Mapping[str, str]] = ..., + allow_no_value: bool = False, + *, + delimiters: Sequence[str] = ("=", ":"), + comment_prefixes: Sequence[str] = ("#", ";"), + inline_comment_prefixes: Sequence[str] | None = None, + strict: bool = True, + empty_lines_in_values: bool = True, + default_section: str = "DEFAULT", + interpolation: Interpolation | None = ..., + converters: _ConvertersMap = ..., + ) -> None: ... + def __len__(self) -> int: ... def __getitem__(self, key: str) -> SectionProxy: ... def __setitem__(self, key: str, value: _Section) -> None: ... @@ -300,7 +379,10 @@ class InterpolationSyntaxError(InterpolationError): ... class ParsingError(Error): source: str errors: list[tuple[int, str]] - if sys.version_info >= (3, 12): + if sys.version_info >= (3, 13): + def __init__(self, source: str, *args: object) -> None: ... + def combine(self, others: Iterable[ParsingError]) -> ParsingError: ... + elif sys.version_info >= (3, 12): def __init__(self, source: str) -> None: ... else: def __init__(self, source: str | None = None, filename: str | None = None) -> None: ... @@ -311,3 +393,12 @@ class MissingSectionHeaderError(ParsingError): lineno: int line: str def __init__(self, filename: str, lineno: int, line: str) -> None: ... + +if sys.version_info >= (3, 13): + class _UNNAMED_SECTION: ... + UNNAMED_SECTION: _UNNAMED_SECTION + + class MultilineContinuationError(ParsingError): + lineno: int + line: str + def __init__(self, filename: str, lineno: int, line: str) -> None: ... 
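For reference, the configparser stub changes above track the Python 3.13
additions (allow_unnamed_section, UNNAMED_SECTION and MultilineContinuationError).
A minimal illustrative sketch of the runtime behaviour they describe, assuming
Python 3.13 (not part of the patch itself):

    # Illustrative only: options that appear before any [section] header are
    # stored under the UNNAMED_SECTION sentinel when allow_unnamed_section=True.
    import configparser

    parser = configparser.ConfigParser(allow_unnamed_section=True)
    parser.read_string("top_option = 1\n[named]\nother = 2\n")

    print(parser.get(configparser.UNNAMED_SECTION, "top_option"))  # -> '1'
    print(parser.get("named", "other"))                            # -> '2'
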
diff --git a/mypy/typeshed/stdlib/dataclasses.pyi b/mypy/typeshed/stdlib/dataclasses.pyi index 18c7e7b5a4671..30489e6f8b3da 100644 --- a/mypy/typeshed/stdlib/dataclasses.pyi +++ b/mypy/typeshed/stdlib/dataclasses.pyi @@ -5,7 +5,7 @@ from _typeshed import DataclassInstance from builtins import type as Type # alias to avoid name clashes with fields named "type" from collections.abc import Callable, Iterable, Mapping from typing import Any, Generic, Literal, Protocol, TypeVar, overload -from typing_extensions import TypeAlias, TypeGuard +from typing_extensions import TypeAlias, TypeIs if sys.version_info >= (3, 9): from types import GenericAlias @@ -214,11 +214,9 @@ else: def fields(class_or_instance: DataclassInstance | type[DataclassInstance]) -> tuple[Field[Any], ...]: ... @overload -def is_dataclass(obj: DataclassInstance) -> Literal[True]: ... +def is_dataclass(obj: type) -> TypeIs[type[DataclassInstance]]: ... @overload -def is_dataclass(obj: type) -> TypeGuard[type[DataclassInstance]]: ... -@overload -def is_dataclass(obj: object) -> TypeGuard[DataclassInstance | type[DataclassInstance]]: ... +def is_dataclass(obj: object) -> TypeIs[DataclassInstance | type[DataclassInstance]]: ... class FrozenInstanceError(AttributeError): ... diff --git a/mypy/typeshed/stdlib/enum.pyi b/mypy/typeshed/stdlib/enum.pyi index 96cb2264ea20c..5c82b07c41852 100644 --- a/mypy/typeshed/stdlib/enum.pyi +++ b/mypy/typeshed/stdlib/enum.pyi @@ -31,10 +31,12 @@ if sys.version_info >= (3, 11): "nonmember", "property", "verify", + "pickle_by_enum_name", + "pickle_by_global_name", ] -if sys.version_info >= (3, 11): - __all__ += ["pickle_by_enum_name", "pickle_by_global_name"] +if sys.version_info >= (3, 13): + __all__ += ["EnumDict"] _EnumMemberT = TypeVar("_EnumMemberT") _EnumerationT = TypeVar("_EnumerationT", bound=type[Enum]) @@ -74,6 +76,12 @@ class _EnumDict(dict[str, Any]): def update(self, members: SupportsKeysAndGetItem[str, Any], **more_members: Any) -> None: ... @overload def update(self, members: Iterable[tuple[str, Any]], **more_members: Any) -> None: ... + if sys.version_info >= (3, 13): + @property + def member_names(self) -> list[str]: ... + +if sys.version_info >= (3, 13): + EnumDict = _EnumDict # Structurally: Iterable[T], Reversible[T], Container[T] where T is the enum itself class EnumMeta(type): @@ -259,9 +267,9 @@ if sys.version_info >= (3, 11): def _generate_next_value_(name: str, start: int, count: int, last_values: list[str]) -> str: ... class EnumCheck(StrEnum): - CONTINUOUS: str - NAMED_FLAGS: str - UNIQUE: str + CONTINUOUS = "no skipped integer values" + NAMED_FLAGS = "multi-flag aliases may not contain unnamed flags" + UNIQUE = "one name per value" CONTINUOUS = EnumCheck.CONTINUOUS NAMED_FLAGS = EnumCheck.NAMED_FLAGS @@ -272,10 +280,10 @@ if sys.version_info >= (3, 11): def __call__(self, enumeration: _EnumerationT) -> _EnumerationT: ... 
class FlagBoundary(StrEnum): - STRICT: str - CONFORM: str - EJECT: str - KEEP: str + STRICT = "strict" + CONFORM = "conform" + EJECT = "eject" + KEEP = "keep" STRICT = FlagBoundary.STRICT CONFORM = FlagBoundary.CONFORM diff --git a/mypy/typeshed/stdlib/glob.pyi b/mypy/typeshed/stdlib/glob.pyi index 914ccc12ef1e3..03cb5418e2565 100644 --- a/mypy/typeshed/stdlib/glob.pyi +++ b/mypy/typeshed/stdlib/glob.pyi @@ -1,10 +1,13 @@ import sys from _typeshed import StrOrBytesPath -from collections.abc import Iterator +from collections.abc import Iterator, Sequence from typing import AnyStr __all__ = ["escape", "glob", "iglob"] +if sys.version_info >= (3, 13): + __all__ += ["translate"] + def glob0(dirname: AnyStr, pattern: AnyStr) -> list[AnyStr]: ... def glob1(dirname: AnyStr, pattern: AnyStr) -> list[AnyStr]: ... @@ -40,3 +43,8 @@ else: def escape(pathname: AnyStr) -> AnyStr: ... def has_magic(s: str | bytes) -> bool: ... # undocumented + +if sys.version_info >= (3, 13): + def translate( + pat: str, *, recursive: bool = False, include_hidden: bool = False, seps: Sequence[str] | None = None + ) -> str: ... diff --git a/mypy/typeshed/stdlib/io.pyi b/mypy/typeshed/stdlib/io.pyi index fdbbc8dddce9b..01f3bfc06a27a 100644 --- a/mypy/typeshed/stdlib/io.pyi +++ b/mypy/typeshed/stdlib/io.pyi @@ -75,7 +75,7 @@ class IOBase(metaclass=abc.ABCMeta): def __del__(self) -> None: ... @property def closed(self) -> bool: ... - def _checkClosed(self, msg: str | None = ...) -> None: ... # undocumented + def _checkClosed(self) -> None: ... # undocumented class RawIOBase(IOBase): def readall(self) -> bytes: ... diff --git a/mypy/typeshed/stdlib/ipaddress.pyi b/mypy/typeshed/stdlib/ipaddress.pyi index 98b1893d2a8ae..03decc74e65e7 100644 --- a/mypy/typeshed/stdlib/ipaddress.pyi +++ b/mypy/typeshed/stdlib/ipaddress.pyi @@ -147,7 +147,11 @@ class _BaseV4: @property def max_prefixlen(self) -> Literal[32]: ... -class IPv4Address(_BaseV4, _BaseAddress): ... +class IPv4Address(_BaseV4, _BaseAddress): + if sys.version_info >= (3, 13): + @property + def ipv6_mapped(self) -> IPv6Address: ... + class IPv4Network(_BaseV4, _BaseNetwork[IPv4Address]): ... class IPv4Interface(IPv4Address, _BaseInterface[IPv4Address, IPv4Network]): diff --git a/mypy/typeshed/stdlib/itertools.pyi b/mypy/typeshed/stdlib/itertools.pyi index 264064dcd682f..16e04829c6cf9 100644 --- a/mypy/typeshed/stdlib/itertools.pyi +++ b/mypy/typeshed/stdlib/itertools.pyi @@ -17,6 +17,10 @@ _T3 = TypeVar("_T3") _T4 = TypeVar("_T4") _T5 = TypeVar("_T5") _T6 = TypeVar("_T6") +_T7 = TypeVar("_T7") +_T8 = TypeVar("_T8") +_T9 = TypeVar("_T9") +_T10 = TypeVar("_T10") _Step: TypeAlias = SupportsFloat | SupportsInt | SupportsIndex | SupportsComplex @@ -214,6 +218,60 @@ class product(Iterator[_T_co]): /, ) -> product[tuple[_T1, _T2, _T3, _T4, _T5, _T6]]: ... @overload + def __new__( + cls, + iter1: Iterable[_T1], + iter2: Iterable[_T2], + iter3: Iterable[_T3], + iter4: Iterable[_T4], + iter5: Iterable[_T5], + iter6: Iterable[_T6], + iter7: Iterable[_T7], + /, + ) -> product[tuple[_T1, _T2, _T3, _T4, _T5, _T6, _T7]]: ... + @overload + def __new__( + cls, + iter1: Iterable[_T1], + iter2: Iterable[_T2], + iter3: Iterable[_T3], + iter4: Iterable[_T4], + iter5: Iterable[_T5], + iter6: Iterable[_T6], + iter7: Iterable[_T7], + iter8: Iterable[_T8], + /, + ) -> product[tuple[_T1, _T2, _T3, _T4, _T5, _T6, _T7, _T8]]: ... 
+ @overload + def __new__( + cls, + iter1: Iterable[_T1], + iter2: Iterable[_T2], + iter3: Iterable[_T3], + iter4: Iterable[_T4], + iter5: Iterable[_T5], + iter6: Iterable[_T6], + iter7: Iterable[_T7], + iter8: Iterable[_T8], + iter9: Iterable[_T9], + /, + ) -> product[tuple[_T1, _T2, _T3, _T4, _T5, _T6, _T7, _T8, _T9]]: ... + @overload + def __new__( + cls, + iter1: Iterable[_T1], + iter2: Iterable[_T2], + iter3: Iterable[_T3], + iter4: Iterable[_T4], + iter5: Iterable[_T5], + iter6: Iterable[_T6], + iter7: Iterable[_T7], + iter8: Iterable[_T8], + iter9: Iterable[_T9], + iter10: Iterable[_T10], + /, + ) -> product[tuple[_T1, _T2, _T3, _T4, _T5, _T6, _T7, _T8, _T9, _T10]]: ... + @overload def __new__(cls, *iterables: Iterable[_T1], repeat: int = 1) -> product[tuple[_T1, ...]]: ... def __iter__(self) -> Self: ... def __next__(self) -> _T_co: ... diff --git a/mypy/typeshed/stdlib/json/encoder.pyi b/mypy/typeshed/stdlib/json/encoder.pyi index c1062688bd93e..473398a60b2a0 100644 --- a/mypy/typeshed/stdlib/json/encoder.pyi +++ b/mypy/typeshed/stdlib/json/encoder.pyi @@ -10,8 +10,8 @@ INFINITY: float def py_encode_basestring(s: str) -> str: ... # undocumented def py_encode_basestring_ascii(s: str) -> str: ... # undocumented -def encode_basestring(s: str) -> str: ... # undocumented -def encode_basestring_ascii(s: str) -> str: ... # undocumented +def encode_basestring(s: str, /) -> str: ... # undocumented +def encode_basestring_ascii(s: str, /) -> str: ... # undocumented class JSONEncoder: item_separator: str diff --git a/mypy/typeshed/stdlib/locale.pyi b/mypy/typeshed/stdlib/locale.pyi index c18523e043616..58de654495723 100644 --- a/mypy/typeshed/stdlib/locale.pyi +++ b/mypy/typeshed/stdlib/locale.pyi @@ -96,7 +96,6 @@ __all__ = [ "getpreferredencoding", "Error", "setlocale", - "resetlocale", "localeconv", "strcoll", "strxfrm", @@ -121,6 +120,9 @@ if sys.version_info >= (3, 11): if sys.version_info < (3, 12): __all__ += ["format"] +if sys.version_info < (3, 13): + __all__ += ["resetlocale"] + if sys.platform != "win32": __all__ += ["LC_MESSAGES"] @@ -133,7 +135,9 @@ def getlocale(category: int = ...) -> tuple[_str | None, _str | None]: ... def setlocale(category: int, locale: _str | Iterable[_str | None] | None = None) -> _str: ... def getpreferredencoding(do_setlocale: bool = True) -> _str: ... def normalize(localename: _str) -> _str: ... -def resetlocale(category: int = ...) -> None: ... + +if sys.version_info < (3, 13): + def resetlocale(category: int = ...) -> None: ... if sys.version_info < (3, 12): def format( diff --git a/mypy/typeshed/stdlib/logging/__init__.pyi b/mypy/typeshed/stdlib/logging/__init__.pyi index 8b19444a5d013..4c6163257236f 100644 --- a/mypy/typeshed/stdlib/logging/__init__.pyi +++ b/mypy/typeshed/stdlib/logging/__init__.pyi @@ -8,7 +8,7 @@ from string import Template from time import struct_time from types import FrameType, TracebackType from typing import Any, ClassVar, Generic, Literal, Protocol, TextIO, TypeVar, overload -from typing_extensions import Self, TypeAlias +from typing_extensions import Self, TypeAlias, deprecated if sys.version_info >= (3, 11): from types import GenericAlias @@ -572,7 +572,11 @@ fatal = critical def disable(level: int = 50) -> None: ... def addLevelName(level: int, levelName: str) -> None: ... -def getLevelName(level: _Level) -> Any: ... +@overload +def getLevelName(level: int) -> str: ... +@overload +@deprecated("The str -> int case is considered a mistake.") +def getLevelName(level: str) -> Any: ... 
if sys.version_info >= (3, 11): def getLevelNamesMapping() -> dict[str, int]: ... diff --git a/mypy/typeshed/stdlib/logging/handlers.pyi b/mypy/typeshed/stdlib/logging/handlers.pyi index 4c3dc913308cc..4e97012abba11 100644 --- a/mypy/typeshed/stdlib/logging/handlers.pyi +++ b/mypy/typeshed/stdlib/logging/handlers.pyi @@ -46,7 +46,7 @@ class BaseRotatingHandler(FileHandler): def rotate(self, source: str, dest: str) -> None: ... class RotatingFileHandler(BaseRotatingHandler): - maxBytes: str # undocumented + maxBytes: int # undocumented backupCount: int # undocumented if sys.version_info >= (3, 9): def __init__( diff --git a/mypy/typeshed/stdlib/math.pyi b/mypy/typeshed/stdlib/math.pyi index 0e6565fcf5883..2bb61e0669b4d 100644 --- a/mypy/typeshed/stdlib/math.pyi +++ b/mypy/typeshed/stdlib/math.pyi @@ -125,4 +125,4 @@ if sys.version_info >= (3, 9): def ulp(x: _SupportsFloatOrIndex, /) -> float: ... if sys.version_info >= (3, 13): - def fma(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, z: _SupportsFloatOrIndex) -> float: ... + def fma(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, z: _SupportsFloatOrIndex, /) -> float: ... diff --git a/mypy/typeshed/stdlib/mimetypes.pyi b/mypy/typeshed/stdlib/mimetypes.pyi index e74b214d3ff10..517193e3516f1 100644 --- a/mypy/typeshed/stdlib/mimetypes.pyi +++ b/mypy/typeshed/stdlib/mimetypes.pyi @@ -1,3 +1,4 @@ +import sys from _typeshed import StrPath from collections.abc import Sequence from typing import IO @@ -18,6 +19,9 @@ __all__ = [ "common_types", ] +if sys.version_info >= (3, 13): + __all__ += ["guess_file_type"] + def guess_type(url: StrPath, strict: bool = True) -> tuple[str | None, str | None]: ... def guess_all_extensions(type: str, strict: bool = True) -> list[str]: ... def guess_extension(type: str, strict: bool = True) -> str | None: ... @@ -25,6 +29,9 @@ def init(files: Sequence[str] | None = None) -> None: ... def read_mime_types(file: str) -> dict[str, str] | None: ... def add_type(type: str, ext: str, strict: bool = True) -> None: ... +if sys.version_info >= (3, 13): + def guess_file_type(path: StrPath, *, strict: bool = True) -> tuple[str | None, str | None]: ... + inited: bool knownfiles: list[str] suffix_map: dict[str, str] @@ -44,3 +51,5 @@ class MimeTypes: def read(self, filename: str, strict: bool = True) -> None: ... def readfp(self, fp: IO[str], strict: bool = True) -> None: ... def read_windows_registry(self, strict: bool = True) -> None: ... + if sys.version_info >= (3, 13): + def guess_file_type(self, path: StrPath, *, strict: bool = True) -> tuple[str | None, str | None]: ... diff --git a/mypy/typeshed/stdlib/mmap.pyi b/mypy/typeshed/stdlib/mmap.pyi index 93c4f408e5b6d..7688970e57863 100644 --- a/mypy/typeshed/stdlib/mmap.pyi +++ b/mypy/typeshed/stdlib/mmap.pyi @@ -1,7 +1,7 @@ import sys from _typeshed import ReadableBuffer, Unused from collections.abc import Iterable, Iterator, Sized -from typing import NoReturn, overload +from typing import Final, NoReturn, overload from typing_extensions import Self ACCESS_DEFAULT: int @@ -76,6 +76,8 @@ class mmap(Iterable[int], Sized): def __exit__(self, *args: Unused) -> None: ... def __buffer__(self, flags: int, /) -> memoryview: ... def __release_buffer__(self, buffer: memoryview, /) -> None: ... + if sys.version_info >= (3, 13): + def seekable(self) -> bool: ... 
if sys.platform != "win32": MADV_NORMAL: int @@ -111,3 +113,9 @@ if sys.platform != "linux" and sys.platform != "darwin" and sys.platform != "win if sys.version_info >= (3, 10) and sys.platform == "darwin": MADV_FREE_REUSABLE: int MADV_FREE_REUSE: int + +if sys.version_info >= (3, 13) and sys.platform != "win32": + MAP_32BIT: Final = 32768 + +if sys.version_info >= (3, 13) and sys.platform == "darwin": + MAP_TPRO: Final = 524288 diff --git a/mypy/typeshed/stdlib/multiprocessing/context.pyi b/mypy/typeshed/stdlib/multiprocessing/context.pyi index 9a45a81559c04..605be4686c1ff 100644 --- a/mypy/typeshed/stdlib/multiprocessing/context.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/context.pyi @@ -93,16 +93,20 @@ class BaseContext: def Value(self, typecode_or_type: str | type[_CData], *args: Any, lock: bool | _LockLike = True) -> Any: ... @overload def Array( - self, typecode_or_type: type[c_char], size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = True - ) -> SynchronizedString: ... + self, typecode_or_type: type[_SimpleCData[_T]], size_or_initializer: int | Sequence[Any], *, lock: Literal[False] + ) -> SynchronizedArray[_T]: ... @overload def Array( - self, typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[False] - ) -> SynchronizedArray[_CT]: ... + self, typecode_or_type: type[c_char], size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = True + ) -> SynchronizedString: ... @overload def Array( - self, typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = True - ) -> SynchronizedArray[_CT]: ... + self, + typecode_or_type: type[_SimpleCData[_T]], + size_or_initializer: int | Sequence[Any], + *, + lock: Literal[True] | _LockLike = True, + ) -> SynchronizedArray[_T]: ... @overload def Array( self, typecode_or_type: str, size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = True diff --git a/mypy/typeshed/stdlib/multiprocessing/managers.pyi b/mypy/typeshed/stdlib/multiprocessing/managers.pyi index 9b2d2970112e8..5d5b9cdcb9135 100644 --- a/mypy/typeshed/stdlib/multiprocessing/managers.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/managers.pyi @@ -83,6 +83,8 @@ class DictProxy(BaseProxy, MutableMapping[_KT, _VT]): def keys(self) -> list[_KT]: ... # type: ignore[override] def items(self) -> list[tuple[_KT, _VT]]: ... # type: ignore[override] def values(self) -> list[_VT]: ... # type: ignore[override] + if sys.version_info >= (3, 13): + def __class_getitem__(cls, args: Any, /) -> Any: ... class BaseListProxy(BaseProxy, MutableSequence[_T]): __builtins__: ClassVar[dict[str, Any]] @@ -117,6 +119,8 @@ class BaseListProxy(BaseProxy, MutableSequence[_T]): class ListProxy(BaseListProxy[_T]): def __iadd__(self, value: Iterable[_T], /) -> Self: ... # type: ignore[override] def __imul__(self, value: SupportsIndex, /) -> Self: ... # type: ignore[override] + if sys.version_info >= (3, 13): + def __class_getitem__(cls, args: Any, /) -> Any: ... 
# Returned by BaseManager.get_server() class Server: diff --git a/mypy/typeshed/stdlib/multiprocessing/shared_memory.pyi b/mypy/typeshed/stdlib/multiprocessing/shared_memory.pyi index 0a6b113b194fc..b63cedf858676 100644 --- a/mypy/typeshed/stdlib/multiprocessing/shared_memory.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/shared_memory.pyi @@ -11,7 +11,11 @@ __all__ = ["SharedMemory", "ShareableList"] _SLT = TypeVar("_SLT", int, float, bool, str, bytes, None) class SharedMemory: - def __init__(self, name: str | None = None, create: bool = False, size: int = 0) -> None: ... + if sys.version_info >= (3, 13): + def __init__(self, name: str | None = None, create: bool = False, size: int = 0, *, track: bool = True) -> None: ... + else: + def __init__(self, name: str | None = None, create: bool = False, size: int = 0) -> None: ... + @property def buf(self) -> memoryview: ... @property diff --git a/mypy/typeshed/stdlib/multiprocessing/sharedctypes.pyi b/mypy/typeshed/stdlib/multiprocessing/sharedctypes.pyi index 4093a97e6ca33..2b96ff0474706 100644 --- a/mypy/typeshed/stdlib/multiprocessing/sharedctypes.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/sharedctypes.pyi @@ -39,12 +39,20 @@ def Array( ) -> _CT: ... @overload def Array( - typecode_or_type: type[_CT], + typecode_or_type: type[c_char], size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = True, ctx: BaseContext | None = None, -) -> SynchronizedArray[_CT]: ... +) -> SynchronizedString: ... +@overload +def Array( + typecode_or_type: type[_SimpleCData[_T]], + size_or_initializer: int | Sequence[Any], + *, + lock: Literal[True] | _LockLike = True, + ctx: BaseContext | None = None, +) -> SynchronizedArray[_T]: ... @overload def Array( typecode_or_type: str, @@ -65,9 +73,11 @@ def copy(obj: _CT) -> _CT: ... @overload def synchronized(obj: _SimpleCData[_T], lock: _LockLike | None = None, ctx: Any | None = None) -> Synchronized[_T]: ... @overload -def synchronized(obj: ctypes.Array[c_char], lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedString: ... +def synchronized(obj: ctypes.Array[c_char], lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedString: ... # type: ignore @overload -def synchronized(obj: ctypes.Array[_CT], lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedArray[_CT]: ... +def synchronized( + obj: ctypes.Array[_SimpleCData[_T]], lock: _LockLike | None = None, ctx: Any | None = None +) -> SynchronizedArray[_T]: ... @overload def synchronized(obj: _CT, lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedBase[_CT]: ... @@ -89,19 +99,30 @@ class SynchronizedBase(Generic[_CT]): class Synchronized(SynchronizedBase[_SimpleCData[_T]], Generic[_T]): value: _T -class SynchronizedArray(SynchronizedBase[ctypes.Array[_CT]], Generic[_CT]): +class SynchronizedArray(SynchronizedBase[ctypes.Array[_SimpleCData[_T]]], Generic[_T]): def __len__(self) -> int: ... @overload - def __getitem__(self, i: slice) -> list[_CT]: ... + def __getitem__(self, i: slice) -> list[_T]: ... @overload - def __getitem__(self, i: int) -> _CT: ... + def __getitem__(self, i: int) -> _T: ... @overload - def __setitem__(self, i: slice, value: Iterable[_CT]) -> None: ... + def __setitem__(self, i: slice, value: Iterable[_T]) -> None: ... @overload - def __setitem__(self, i: int, value: _CT) -> None: ... - def __getslice__(self, start: int, stop: int) -> list[_CT]: ... - def __setslice__(self, start: int, stop: int, values: Iterable[_CT]) -> None: ... 
+ def __setitem__(self, i: int, value: _T) -> None: ... + def __getslice__(self, start: int, stop: int) -> list[_T]: ... + def __setslice__(self, start: int, stop: int, values: Iterable[_T]) -> None: ... + +class SynchronizedString(SynchronizedArray[bytes]): + @overload # type: ignore[override] + def __getitem__(self, i: slice) -> bytes: ... + @overload # type: ignore[override] + def __getitem__(self, i: int) -> bytes: ... + @overload # type: ignore[override] + def __setitem__(self, i: slice, value: bytes) -> None: ... + @overload # type: ignore[override] + def __setitem__(self, i: int, value: bytes) -> None: ... # type: ignore[override] + def __getslice__(self, start: int, stop: int) -> bytes: ... # type: ignore[override] + def __setslice__(self, start: int, stop: int, values: bytes) -> None: ... # type: ignore[override] -class SynchronizedString(SynchronizedArray[c_char]): value: bytes raw: bytes diff --git a/mypy/typeshed/stdlib/os/__init__.pyi b/mypy/typeshed/stdlib/os/__init__.pyi index 31c5d2aa3ee6b..9b00117a55999 100644 --- a/mypy/typeshed/stdlib/os/__init__.pyi +++ b/mypy/typeshed/stdlib/os/__init__.pyi @@ -914,8 +914,8 @@ if sys.platform != "win32": def forkpty() -> tuple[int, int]: ... # some flavors of Unix def killpg(pgid: int, signal: int, /) -> None: ... def nice(increment: int, /) -> int: ... - if sys.platform != "darwin": - def plock(op: int, /) -> None: ... # ???op is int? + if sys.platform != "darwin" and sys.platform != "linux": + def plock(op: int, /) -> None: ... class _wrap_close(_TextIOWrapper): def __init__(self, stream: _TextIOWrapper, proc: Popen[str]) -> None: ... @@ -1141,16 +1141,16 @@ if sys.version_info >= (3, 10) and sys.platform == "linux": if sys.version_info >= (3, 12) and sys.platform == "linux": CLONE_FILES: int CLONE_FS: int - CLONE_NEWCGROUP: int - CLONE_NEWIPC: int - CLONE_NEWNET: int + CLONE_NEWCGROUP: int # Linux 4.6+ + CLONE_NEWIPC: int # Linux 2.6.19+ + CLONE_NEWNET: int # Linux 2.6.24+ CLONE_NEWNS: int - CLONE_NEWPID: int - CLONE_NEWTIME: int - CLONE_NEWUSER: int - CLONE_NEWUTS: int + CLONE_NEWPID: int # Linux 3.8+ + CLONE_NEWTIME: int # Linux 5.6+ + CLONE_NEWUSER: int # Linux 3.8+ + CLONE_NEWUTS: int # Linux 2.6.19+ CLONE_SIGHAND: int - CLONE_SYSVSEM: int + CLONE_SYSVSEM: int # Linux 2.6.26+ CLONE_THREAD: int CLONE_VM: int def unshare(flags: int) -> None: ... diff --git a/mypy/typeshed/stdlib/pathlib.pyi b/mypy/typeshed/stdlib/pathlib.pyi index 0013e221f2e1b..c8c8dde0f33e3 100644 --- a/mypy/typeshed/stdlib/pathlib.pyi +++ b/mypy/typeshed/stdlib/pathlib.pyi @@ -1,4 +1,5 @@ import sys +import types from _typeshed import ( OpenBinaryMode, OpenBinaryModeReading, @@ -14,7 +15,7 @@ from collections.abc import Callable, Generator, Iterator, Sequence from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper from os import PathLike, stat_result from types import TracebackType -from typing import IO, Any, BinaryIO, Literal, overload +from typing import IO, Any, BinaryIO, ClassVar, Literal, overload from typing_extensions import Self, deprecated if sys.version_info >= (3, 9): @@ -22,7 +23,14 @@ if sys.version_info >= (3, 9): __all__ = ["PurePath", "PurePosixPath", "PureWindowsPath", "Path", "PosixPath", "WindowsPath"] +if sys.version_info >= (3, 13): + __all__ += ["UnsupportedOperation"] + class PurePath(PathLike[str]): + if sys.version_info >= (3, 13): + parser: ClassVar[types.ModuleType] + def full_match(self, pattern: StrPath, *, case_sensitive: bool | None = None) -> bool: ... 
+ @property def parts(self) -> tuple[str, ...]: ... @property @@ -94,8 +102,6 @@ class PureWindowsPath(PurePath): ... class Path(PurePath): def __new__(cls, *args: StrPath, **kwargs: Any) -> Self: ... - def __enter__(self) -> Self: ... - def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... @classmethod def cwd(cls) -> Self: ... if sys.version_info >= (3, 10): @@ -105,17 +111,38 @@ class Path(PurePath): def stat(self) -> stat_result: ... def chmod(self, mode: int) -> None: ... - if sys.version_info >= (3, 12): - def exists(self, *, follow_symlinks: bool = True) -> bool: ... + if sys.version_info >= (3, 13): + @classmethod + def from_uri(cls, uri: str) -> Path: ... + def is_dir(self, *, follow_symlinks: bool = True) -> bool: ... + def is_file(self, *, follow_symlinks: bool = True) -> bool: ... + def read_text(self, encoding: str | None = None, errors: str | None = None, newline: str | None = None) -> str: ... + else: + def __enter__(self) -> Self: ... + def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... + def is_dir(self) -> bool: ... + def is_file(self) -> bool: ... + def read_text(self, encoding: str | None = None, errors: str | None = None) -> str: ... + + if sys.version_info >= (3, 13): + def glob( + self, pattern: str, *, case_sensitive: bool | None = None, recurse_symlinks: bool = False + ) -> Generator[Self, None, None]: ... + def rglob( + self, pattern: str, *, case_sensitive: bool | None = None, recurse_symlinks: bool = False + ) -> Generator[Self, None, None]: ... + elif sys.version_info >= (3, 12): def glob(self, pattern: str, *, case_sensitive: bool | None = None) -> Generator[Self, None, None]: ... def rglob(self, pattern: str, *, case_sensitive: bool | None = None) -> Generator[Self, None, None]: ... else: - def exists(self) -> bool: ... def glob(self, pattern: str) -> Generator[Self, None, None]: ... def rglob(self, pattern: str) -> Generator[Self, None, None]: ... - def is_dir(self) -> bool: ... - def is_file(self) -> bool: ... + if sys.version_info >= (3, 12): + def exists(self, *, follow_symlinks: bool = True) -> bool: ... + else: + def exists(self) -> bool: ... + def is_symlink(self) -> bool: ... def is_socket(self) -> bool: ... def is_fifo(self) -> bool: ... @@ -186,8 +213,12 @@ class Path(PurePath): if sys.platform != "win32": # These methods do "exist" on Windows, but they always raise NotImplementedError, # so it's safer to pretend they don't exist - def owner(self) -> str: ... - def group(self) -> str: ... + if sys.version_info >= (3, 13): + def owner(self, *, follow_symlinks: bool = True) -> str: ... + def group(self, *, follow_symlinks: bool = True) -> str: ... + else: + def owner(self) -> str: ... + def group(self) -> str: ... # This method does "exist" on Windows on <3.12, but always raises NotImplementedError # On py312+, it works properly on Windows, as with all other platforms @@ -212,7 +243,6 @@ class Path(PurePath): def absolute(self) -> Self: ... def expanduser(self) -> Self: ... def read_bytes(self) -> bytes: ... - def read_text(self, encoding: str | None = None, errors: str | None = None) -> str: ... def samefile(self, other_path: StrPath) -> bool: ... def write_bytes(self, data: ReadableBuffer) -> int: ... if sys.version_info >= (3, 10): @@ -234,3 +264,6 @@ class Path(PurePath): class PosixPath(Path, PurePosixPath): ... class WindowsPath(Path, PureWindowsPath): ... 
+ +if sys.version_info >= (3, 13): + class UnsupportedOperation(NotImplementedError): ... diff --git a/mypy/typeshed/stdlib/platform.pyi b/mypy/typeshed/stdlib/platform.pyi index f0e6d4123e1dc..c47ecdc51df49 100644 --- a/mypy/typeshed/stdlib/platform.pyi +++ b/mypy/typeshed/stdlib/platform.pyi @@ -40,3 +40,28 @@ def platform(aliased: bool = ..., terse: bool = ...) -> str: ... if sys.version_info >= (3, 10): def freedesktop_os_release() -> dict[str, str]: ... + +if sys.version_info >= (3, 13): + class AndroidVer(NamedTuple): + release: str + api_level: int + manufacturer: str + model: str + device: str + is_emulator: bool + + class IOSVersionInfo(NamedTuple): + system: str + release: str + model: str + is_simulator: bool + + def android_ver( + release: str = "", + api_level: int = 0, + manufacturer: str = "", + model: str = "", + device: str = "", + is_emulator: bool = False, + ) -> AndroidVer: ... + def ios_ver(system: str = "", release: str = "", model: str = "", is_simulator: bool = False) -> IOSVersionInfo: ... diff --git a/mypy/typeshed/stdlib/shutil.pyi b/mypy/typeshed/stdlib/shutil.pyi index f6c8a390d85f5..dcff18d110bd8 100644 --- a/mypy/typeshed/stdlib/shutil.pyi +++ b/mypy/typeshed/stdlib/shutil.pyi @@ -78,13 +78,25 @@ class _RmtreeType(Protocol): avoids_symlink_attacks: bool if sys.version_info >= (3, 12): @overload - @deprecated("The `onerror` parameter is deprecated and will be removed in Python 3.14. Use `onexc` instead.") + @deprecated("The `onerror` parameter is deprecated. Use `onexc` instead.") + def __call__( + self, + path: StrOrBytesPath, + ignore_errors: bool, + onerror: _OnErrorCallback, + *, + onexc: None = None, + dir_fd: int | None = None, + ) -> None: ... + @overload + @deprecated("The `onerror` parameter is deprecated. Use `onexc` instead.") def __call__( self, path: StrOrBytesPath, ignore_errors: bool = False, - onerror: _OnErrorCallback | None = None, *, + onerror: _OnErrorCallback, + onexc: None = None, dir_fd: int | None = None, ) -> None: ... @overload diff --git a/mypy/typeshed/stdlib/spwd.pyi b/mypy/typeshed/stdlib/spwd.pyi index 67ad3bfc751b8..3a5d39997dcc7 100644 --- a/mypy/typeshed/stdlib/spwd.pyi +++ b/mypy/typeshed/stdlib/spwd.pyi @@ -36,6 +36,11 @@ if sys.platform != "win32": def sp_expire(self) -> int: ... @property def sp_flag(self) -> int: ... + # Deprecated aliases below. + @property + def sp_nam(self) -> str: ... + @property + def sp_pwd(self) -> str: ... def getspall() -> list[struct_spwd]: ... def getspnam(arg: str, /) -> struct_spwd: ... diff --git a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi index 068ce1514c3c5..3cb4b93e88fe1 100644 --- a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi +++ b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi @@ -428,7 +428,11 @@ class Connection: def executemany(self, sql: str, parameters: Iterable[_Parameters], /) -> Cursor: ... def executescript(self, sql_script: str, /) -> Cursor: ... def interrupt(self) -> None: ... - def iterdump(self) -> Generator[str, None, None]: ... + if sys.version_info >= (3, 13): + def iterdump(self, *, filter: str | None = None) -> Generator[str, None, None]: ... + else: + def iterdump(self) -> Generator[str, None, None]: ... + def rollback(self) -> None: ... 
def set_authorizer( self, authorizer_callback: Callable[[int, str | None, str | None, str | None, str | None], int] | None diff --git a/mypy/typeshed/stdlib/tarfile.pyi b/mypy/typeshed/stdlib/tarfile.pyi index b6fe454eff78a..e520994641744 100644 --- a/mypy/typeshed/stdlib/tarfile.pyi +++ b/mypy/typeshed/stdlib/tarfile.pyi @@ -142,22 +142,43 @@ class TarFile: errorlevel: int | None offset: int # undocumented extraction_filter: _FilterFunction | None - def __init__( - self, - name: StrOrBytesPath | None = None, - mode: Literal["r", "a", "w", "x"] = "r", - fileobj: _Fileobj | None = None, - format: int | None = None, - tarinfo: type[TarInfo] | None = None, - dereference: bool | None = None, - ignore_zeros: bool | None = None, - encoding: str | None = None, - errors: str = "surrogateescape", - pax_headers: Mapping[str, str] | None = None, - debug: int | None = None, - errorlevel: int | None = None, - copybufsize: int | None = None, # undocumented - ) -> None: ... + if sys.version_info >= (3, 13): + stream: bool + def __init__( + self, + name: StrOrBytesPath | None = None, + mode: Literal["r", "a", "w", "x"] = "r", + fileobj: _Fileobj | None = None, + format: int | None = None, + tarinfo: type[TarInfo] | None = None, + dereference: bool | None = None, + ignore_zeros: bool | None = None, + encoding: str | None = None, + errors: str = "surrogateescape", + pax_headers: Mapping[str, str] | None = None, + debug: int | None = None, + errorlevel: int | None = None, + copybufsize: int | None = None, # undocumented + stream: bool = False, + ) -> None: ... + else: + def __init__( + self, + name: StrOrBytesPath | None = None, + mode: Literal["r", "a", "w", "x"] = "r", + fileobj: _Fileobj | None = None, + format: int | None = None, + tarinfo: type[TarInfo] | None = None, + dereference: bool | None = None, + ignore_zeros: bool | None = None, + encoding: str | None = None, + errors: str = "surrogateescape", + pax_headers: Mapping[str, str] | None = None, + debug: int | None = None, + errorlevel: int | None = None, + copybufsize: int | None = None, # undocumented + ) -> None: ... + def __enter__(self) -> Self: ... def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None diff --git a/mypy/typeshed/stdlib/telnetlib.pyi b/mypy/typeshed/stdlib/telnetlib.pyi index d244d54f2fbf2..294a1cb12b63e 100644 --- a/mypy/typeshed/stdlib/telnetlib.pyi +++ b/mypy/typeshed/stdlib/telnetlib.pyi @@ -88,6 +88,7 @@ NOOPT: bytes class Telnet: host: str | None # undocumented + sock: socket.socket | None # undocumented def __init__(self, host: str | None = None, port: int = 0, timeout: float = ...) -> None: ... def open(self, host: str, port: int = 0, timeout: float = ...) -> None: ... def msg(self, msg: str, *args: Any) -> None: ... diff --git a/mypy/typeshed/stdlib/time.pyi b/mypy/typeshed/stdlib/time.pyi index b7962f0751d60..71cdc4d78fdc4 100644 --- a/mypy/typeshed/stdlib/time.pyi +++ b/mypy/typeshed/stdlib/time.pyi @@ -27,6 +27,9 @@ if sys.platform != "win32": if sys.platform == "darwin": CLOCK_UPTIME_RAW: int + if sys.version_info >= (3, 13): + CLOCK_UPTIME_RAW_APPROX: int + CLOCK_MONOTONIC_RAW_APPROX: int if sys.version_info >= (3, 9) and sys.platform == "linux": CLOCK_TAI: int @@ -94,7 +97,7 @@ if sys.platform != "win32": def clock_settime(clk_id: int, time: float, /) -> None: ... # Unix only if sys.platform != "win32": - def clock_gettime_ns(clock_id: int, /) -> int: ... + def clock_gettime_ns(clk_id: int, /) -> int: ... 
def clock_settime_ns(clock_id: int, time: int, /) -> int: ... if sys.platform == "linux": diff --git a/mypy/typeshed/stdlib/traceback.pyi b/mypy/typeshed/stdlib/traceback.pyi index 39803003cfe55..075c0f4b9de8f 100644 --- a/mypy/typeshed/stdlib/traceback.pyi +++ b/mypy/typeshed/stdlib/traceback.pyi @@ -3,7 +3,7 @@ from _typeshed import SupportsWrite, Unused from collections.abc import Generator, Iterable, Iterator, Mapping from types import FrameType, TracebackType from typing import Any, Literal, overload -from typing_extensions import Self, TypeAlias +from typing_extensions import Self, TypeAlias, deprecated __all__ = [ "extract_stack", @@ -85,7 +85,13 @@ def format_list(extracted_list: Iterable[FrameSummary | _FrameSummaryTuple]) -> # undocumented def print_list(extracted_list: Iterable[FrameSummary | _FrameSummaryTuple], file: SupportsWrite[str] | None = None) -> None: ... -if sys.version_info >= (3, 10): +if sys.version_info >= (3, 13): + @overload + def format_exception_only(exc: BaseException | None, /, *, show_group: bool = False) -> list[str]: ... + @overload + def format_exception_only(exc: Unused, /, value: BaseException | None, *, show_group: bool = False) -> list[str]: ... + +elif sys.version_info >= (3, 10): @overload def format_exception_only(exc: BaseException | None, /) -> list[str]: ... @overload @@ -111,13 +117,20 @@ class TracebackException: __context__: TracebackException __suppress_context__: bool stack: StackSummary - exc_type: type[BaseException] filename: str lineno: int text: str offset: int msg: str - if sys.version_info >= (3, 11): + if sys.version_info >= (3, 13): + @property + def exc_type_str(self) -> str: ... + @property + @deprecated("Deprecated in 3.13. Use exc_type_str instead.") + def exc_type(self) -> type[BaseException] | None: ... + else: + exc_type: type[BaseException] + if sys.version_info >= (3, 13): def __init__( self, exc_type: type[BaseException], @@ -130,12 +143,15 @@ class TracebackException: compact: bool = False, max_group_width: int = 15, max_group_depth: int = 10, + save_exc_type: bool = True, _seen: set[int] | None = None, ) -> None: ... - @classmethod - def from_exception( - cls, - exc: BaseException, + elif sys.version_info >= (3, 11): + def __init__( + self, + exc_type: type[BaseException], + exc_value: BaseException, + exc_traceback: TracebackType | None, *, limit: int | None = None, lookup_lines: bool = True, @@ -143,7 +159,8 @@ class TracebackException: compact: bool = False, max_group_width: int = 15, max_group_depth: int = 10, - ) -> Self: ... + _seen: set[int] | None = None, + ) -> None: ... elif sys.version_info >= (3, 10): def __init__( self, @@ -157,6 +174,20 @@ class TracebackException: compact: bool = False, _seen: set[int] | None = None, ) -> None: ... + else: + def __init__( + self, + exc_type: type[BaseException], + exc_value: BaseException, + exc_traceback: TracebackType | None, + *, + limit: int | None = None, + lookup_lines: bool = True, + capture_locals: bool = False, + _seen: set[int] | None = None, + ) -> None: ... + + if sys.version_info >= (3, 11): @classmethod def from_exception( cls, @@ -166,19 +197,21 @@ class TracebackException: lookup_lines: bool = True, capture_locals: bool = False, compact: bool = False, + max_group_width: int = 15, + max_group_depth: int = 10, ) -> Self: ... 
- else: - def __init__( - self, - exc_type: type[BaseException], - exc_value: BaseException, - exc_traceback: TracebackType | None, + elif sys.version_info >= (3, 10): + @classmethod + def from_exception( + cls, + exc: BaseException, *, limit: int | None = None, lookup_lines: bool = True, capture_locals: bool = False, - _seen: set[int] | None = None, - ) -> None: ... + compact: bool = False, + ) -> Self: ... + else: @classmethod def from_exception( cls, exc: BaseException, *, limit: int | None = None, lookup_lines: bool = True, capture_locals: bool = False @@ -190,7 +223,10 @@ class TracebackException: else: def format(self, *, chain: bool = True) -> Generator[str, None, None]: ... - def format_exception_only(self) -> Generator[str, None, None]: ... + if sys.version_info >= (3, 13): + def format_exception_only(self, *, show_group: bool = False, _depth: int = 0) -> Generator[str, None, None]: ... + else: + def format_exception_only(self) -> Generator[str, None, None]: ... if sys.version_info >= (3, 11): def print(self, *, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: ... diff --git a/mypy/typeshed/stdlib/types.pyi b/mypy/typeshed/stdlib/types.pyi index 93cb890463660..9e9dc56b85299 100644 --- a/mypy/typeshed/stdlib/types.pyi +++ b/mypy/typeshed/stdlib/types.pyi @@ -81,7 +81,7 @@ class FunctionType: __name__: str __qualname__: str __annotations__: dict[str, Any] - __kwdefaults__: dict[str, Any] + __kwdefaults__: dict[str, Any] | None if sys.version_info >= (3, 10): @property def __builtins__(self) -> dict[str, Any]: ... @@ -358,6 +358,8 @@ class GeneratorType(Generator[_YieldT_co, _SendT_contra, _ReturnT_co]): ) -> _YieldT_co: ... @overload def throw(self, typ: BaseException, val: None = None, tb: TracebackType | None = ..., /) -> _YieldT_co: ... + if sys.version_info >= (3, 13): + def __class_getitem__(cls, item: Any, /) -> Any: ... @final class AsyncGeneratorType(AsyncGenerator[_YieldT_co, _SendT_contra]): @@ -401,6 +403,8 @@ class CoroutineType(Coroutine[_YieldT_co, _SendT_contra, _ReturnT_co]): ) -> _YieldT_co: ... @overload def throw(self, typ: BaseException, val: None = None, tb: TracebackType | None = ..., /) -> _YieldT_co: ... + if sys.version_info >= (3, 13): + def __class_getitem__(cls, item: Any, /) -> Any: ... @final class MethodType: @@ -587,6 +591,9 @@ if sys.version_info >= (3, 9): def __unpacked__(self) -> bool: ... @property def __typing_unpacked_tuple_args__(self) -> tuple[Any, ...] | None: ... + if sys.version_info >= (3, 10): + def __or__(self, value: Any, /) -> UnionType: ... + def __ror__(self, value: Any, /) -> UnionType: ... # GenericAlias delegates attr access to `__origin__` def __getattr__(self, name: str) -> Any: ... 
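For reference, the types stub changes above (GenericAlias.__or__/__ror__
returning UnionType, and __kwdefaults__ becoming optional) mirror runtime
behaviour that exists since Python 3.10. A minimal illustrative sketch, not
part of the patch:

    # Illustrative only: requires Python 3.10+ for types.UnionType and the
    # "X | Y" union syntax on generic aliases.
    import types

    alias = list[int]        # a types.GenericAlias instance
    union = alias | None     # GenericAlias.__or__ builds a types.UnionType
    assert isinstance(union, types.UnionType)

    def f(*, flag: bool = True) -> None: ...
    def g() -> None: ...

    # __kwdefaults__ is a dict only when keyword-only defaults exist, else None,
    # matching the "dict[str, Any] | None" annotation above.
    assert f.__kwdefaults__ == {"flag": True}
    assert g.__kwdefaults__ is None
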
diff --git a/mypy/typeshed/stdlib/typing.pyi b/mypy/typeshed/stdlib/typing.pyi index 1b021d1eecbda..92427f91f022a 100644 --- a/mypy/typeshed/stdlib/typing.pyi +++ b/mypy/typeshed/stdlib/typing.pyi @@ -21,7 +21,7 @@ from types import ( TracebackType, WrapperDescriptorType, ) -from typing_extensions import Never as _Never, ParamSpec as _ParamSpec +from typing_extensions import Never as _Never, ParamSpec as _ParamSpec, deprecated if sys.version_info >= (3, 9): from types import GenericAlias @@ -129,7 +129,7 @@ if sys.version_info >= (3, 12): __all__ += ["TypeAliasType", "override"] if sys.version_info >= (3, 13): - __all__ += ["get_protocol_members", "is_protocol", "NoDefault"] + __all__ += ["get_protocol_members", "is_protocol", "NoDefault", "TypeIs", "ReadOnly"] Any = object() @@ -183,6 +183,7 @@ class TypeVar: if sys.version_info >= (3, 11): def __typing_subst__(self, arg: Any) -> Any: ... if sys.version_info >= (3, 13): + def __typing_prepare_subst__(self, alias: Any, args: Any) -> tuple[Any, ...]: ... def has_default(self) -> bool: ... # Used for an undocumented mypy feature. Does not exist at runtime. @@ -989,7 +990,35 @@ class ForwardRef: else: def __init__(self, arg: str, is_argument: bool = True) -> None: ... - if sys.version_info >= (3, 9): + if sys.version_info >= (3, 13): + @overload + @deprecated( + "Failing to pass a value to the 'type_params' parameter of ForwardRef._evaluate() is deprecated, " + "as it leads to incorrect behaviour when evaluating a stringified annotation " + "that references a PEP 695 type parameter. It will be disallowed in Python 3.15." + ) + def _evaluate( + self, globalns: dict[str, Any] | None, localns: dict[str, Any] | None, *, recursive_guard: frozenset[str] + ) -> Any | None: ... + @overload + def _evaluate( + self, + globalns: dict[str, Any] | None, + localns: dict[str, Any] | None, + type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...], + *, + recursive_guard: frozenset[str], + ) -> Any | None: ... + elif sys.version_info >= (3, 12): + def _evaluate( + self, + globalns: dict[str, Any] | None, + localns: dict[str, Any] | None, + type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] | None = None, + *, + recursive_guard: frozenset[str], + ) -> Any | None: ... + elif sys.version_info >= (3, 9): def _evaluate( self, globalns: dict[str, Any] | None, localns: dict[str, Any] | None, recursive_guard: frozenset[str] ) -> Any | None: ... @@ -1036,3 +1065,5 @@ if sys.version_info >= (3, 13): class _NoDefaultType: ... NoDefault: _NoDefaultType + TypeIs: _SpecialForm + ReadOnly: _SpecialForm diff --git a/mypy/typeshed/stdlib/typing_extensions.pyi b/mypy/typeshed/stdlib/typing_extensions.pyi index 73fd2dc8cbb33..a7d2b2c2e0835 100644 --- a/mypy/typeshed/stdlib/typing_extensions.pyi +++ b/mypy/typeshed/stdlib/typing_extensions.pyi @@ -419,6 +419,8 @@ if sys.version_info >= (3, 13): from typing import ( NoDefault as NoDefault, ParamSpec as ParamSpec, + ReadOnly as ReadOnly, + TypeIs as TypeIs, TypeVar as TypeVar, TypeVarTuple as TypeVarTuple, get_protocol_members as get_protocol_members, @@ -520,11 +522,11 @@ else: def has_default(self) -> bool: ... def __typing_prepare_subst__(self, alias: Any, args: Any) -> tuple[Any, ...]: ... + ReadOnly: _SpecialForm + TypeIs: _SpecialForm + class Doc: documentation: str def __init__(self, documentation: str, /) -> None: ... def __hash__(self) -> int: ... def __eq__(self, other: object) -> bool: ... 
- -ReadOnly: _SpecialForm -TypeIs: _SpecialForm diff --git a/mypy/typeshed/stdlib/weakref.pyi b/mypy/typeshed/stdlib/weakref.pyi index e345124237dad..aaba7ffc98d95 100644 --- a/mypy/typeshed/stdlib/weakref.pyi +++ b/mypy/typeshed/stdlib/weakref.pyi @@ -41,7 +41,10 @@ _P = ParamSpec("_P") ProxyTypes: tuple[type[Any], ...] class WeakMethod(ref[_CallableT]): - def __new__(cls, meth: _CallableT, callback: Callable[[Self], object] | None = None) -> Self: ... + # `ref` is implemented in `C` so positional-only arguments are enforced, but not in `WeakMethod`. + def __new__( # pyright: ignore[reportInconsistentConstructor] + cls, meth: _CallableT, callback: Callable[[Self], Any] | None = None + ) -> Self: ... def __call__(self) -> _CallableT | None: ... def __eq__(self, other: object) -> bool: ... def __ne__(self, other: object) -> bool: ... diff --git a/mypy/typeshed/stdlib/xml/sax/handler.pyi b/mypy/typeshed/stdlib/xml/sax/handler.pyi index 30fe31d513742..7b7c69048efd1 100644 --- a/mypy/typeshed/stdlib/xml/sax/handler.pyi +++ b/mypy/typeshed/stdlib/xml/sax/handler.pyi @@ -14,7 +14,7 @@ class ContentHandler: def startDocument(self) -> None: ... def endDocument(self) -> None: ... def startPrefixMapping(self, prefix: str | None, uri: str) -> None: ... - def endPrefixMapping(self, prefix) -> None: ... + def endPrefixMapping(self, prefix: str | None) -> None: ... def startElement(self, name: str, attrs: xmlreader.AttributesImpl) -> None: ... def endElement(self, name: str) -> None: ... def startElementNS(self, name: tuple[str, str], qname: str, attrs: xmlreader.AttributesNSImpl) -> None: ... diff --git a/mypyc/codegen/emitfunc.py b/mypyc/codegen/emitfunc.py index 12f57b9cee6f3..d945a28d84817 100644 --- a/mypyc/codegen/emitfunc.py +++ b/mypyc/codegen/emitfunc.py @@ -6,7 +6,14 @@ from mypyc.analysis.blockfreq import frequently_executed_blocks from mypyc.codegen.emit import DEBUG_ERRORS, Emitter, TracebackAndGotoHandler, c_array_initializer -from mypyc.common import MODULE_PREFIX, NATIVE_PREFIX, REG_PREFIX, STATIC_PREFIX, TYPE_PREFIX +from mypyc.common import ( + MODULE_PREFIX, + NATIVE_PREFIX, + REG_PREFIX, + STATIC_PREFIX, + TYPE_PREFIX, + TYPE_VAR_PREFIX, +) from mypyc.ir.class_ir import ClassIR from mypyc.ir.func_ir import FUNC_CLASSMETHOD, FUNC_STATICMETHOD, FuncDecl, FuncIR, all_values from mypyc.ir.ops import ( @@ -14,6 +21,7 @@ NAMESPACE_MODULE, NAMESPACE_STATIC, NAMESPACE_TYPE, + NAMESPACE_TYPE_VAR, Assign, AssignMulti, BasicBlock, @@ -477,6 +485,7 @@ def visit_set_attr(self, op: SetAttr) -> None: NAMESPACE_STATIC: STATIC_PREFIX, NAMESPACE_TYPE: TYPE_PREFIX, NAMESPACE_MODULE: MODULE_PREFIX, + NAMESPACE_TYPE_VAR: TYPE_VAR_PREFIX, } def visit_load_static(self, op: LoadStatic) -> None: diff --git a/mypyc/codegen/emitmodule.py b/mypyc/codegen/emitmodule.py index 6c8f5ac91335e..1d8708912de5d 100644 --- a/mypyc/codegen/emitmodule.py +++ b/mypyc/codegen/emitmodule.py @@ -41,6 +41,7 @@ PREFIX, RUNTIME_C_FILES, TOP_LEVEL_NAME, + TYPE_VAR_PREFIX, shared_lib_name, short_id_from_name, use_vectorcall, @@ -590,6 +591,7 @@ def generate_c_for_modules(self) -> list[tuple[str, str]]: self.declare_finals(module_name, module.final_names, declarations) for cl in module.classes: generate_class_type_decl(cl, emitter, ext_declarations, declarations) + self.declare_type_vars(module_name, module.type_var_names, declarations) for fn in module.functions: generate_function_declaration(fn, declarations) @@ -1063,6 +1065,15 @@ def declare_static_pyobject(self, identifier: str, emitter: Emitter) -> None: symbol = 
emitter.static_name(identifier, None) self.declare_global("PyObject *", symbol) + def declare_type_vars(self, module: str, type_var_names: list[str], emitter: Emitter) -> None: + for name in type_var_names: + static_name = emitter.static_name(name, module, prefix=TYPE_VAR_PREFIX) + emitter.context.declarations[static_name] = HeaderDeclaration( + f"PyObject *{static_name};", + [f"PyObject *{static_name} = NULL;"], + needs_export=False, + ) + def sort_classes(classes: list[tuple[str, ClassIR]]) -> list[tuple[str, ClassIR]]: mod_name = {ir: name for name, ir in classes} diff --git a/mypyc/common.py b/mypyc/common.py index 3d07f6c3d0d32..31567c689c348 100644 --- a/mypyc/common.py +++ b/mypyc/common.py @@ -13,6 +13,7 @@ STATIC_PREFIX: Final = "CPyStatic_" # Static variables (for literals etc.) TYPE_PREFIX: Final = "CPyType_" # Type object struct MODULE_PREFIX: Final = "CPyModule_" # Cached modules +TYPE_VAR_PREFIX: Final = "CPyTypeVar_" # Type variables when using new-style Python 3.12 syntax ATTR_PREFIX: Final = "_" # Attributes ENV_ATTR_NAME: Final = "__mypyc_env__" @@ -78,6 +79,7 @@ "exc_ops.c", "misc_ops.c", "generic_ops.c", + "pythonsupport.c", ] diff --git a/mypyc/ir/module_ir.py b/mypyc/ir/module_ir.py index dcf6f87685474..e3b240629edab 100644 --- a/mypyc/ir/module_ir.py +++ b/mypyc/ir/module_ir.py @@ -21,12 +21,17 @@ def __init__( functions: list[FuncIR], classes: list[ClassIR], final_names: list[tuple[str, RType]], + type_var_names: list[str], ) -> None: self.fullname = fullname self.imports = imports.copy() self.functions = functions self.classes = classes self.final_names = final_names + # Names of C statics used for Python 3.12 type variable objects. + # These are only visible in the module that defined them, so no need + # to serialize. + self.type_var_names = type_var_names def serialize(self) -> JsonDict: return { @@ -45,6 +50,7 @@ def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> ModuleIR: [ctx.functions[FuncDecl.get_id_from_json(f)] for f in data["functions"]], [ClassIR.deserialize(c, ctx) for c in data["classes"]], [(k, deserialize_type(t, ctx)) for k, t in data["final_names"]], + [], ) diff --git a/mypyc/ir/ops.py b/mypyc/ir/ops.py index 377266e797d9c..896ba3ac091c9 100644 --- a/mypyc/ir/ops.py +++ b/mypyc/ir/ops.py @@ -789,6 +789,9 @@ def accept(self, visitor: OpVisitor[T]) -> T: # Namespace for modules NAMESPACE_MODULE: Final = "module" +# Namespace for Python 3.12 type variable objects (implicitly created TypeVar instances, etc.) +NAMESPACE_TYPE_VAR: Final = "typevar" + class LoadStatic(RegisterOp): """Load a static name (name :: static). 
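For context, the mypyc changes that follow add compilation support for Python
3.12 new-style type parameters and the "type" statement: implicit
TypeVar/ParamSpec/TypeVarTuple objects are stored as C statics in the new
NAMESPACE_TYPE_VAR ("CPyTypeVar_") namespace, and type aliases are built as
lazily evaluated TypeAliasType objects. A minimal illustrative sketch of the
source constructs involved, assuming Python 3.12 syntax (not part of the patch):

    # Illustrative only: PEP 695 constructs handled by the irbuild changes below.
    # The alias value is computed lazily via set_type_alias_compute_function_op,
    # and each implicit type variable is created by create_type_params().

    type StrPairs = list[tuple[str, str]]   # lowered by transform_type_alias_stmt

    class Box[T]:                           # T is created with infer_variance=True
        def __init__(self, item: T) -> None:
            self.item = item

        def get(self) -> T:
            return self.item
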
diff --git a/mypyc/irbuild/builder.py b/mypyc/irbuild/builder.py index cca771e82c83f..a9e1ce4719537 100644 --- a/mypyc/irbuild/builder.py +++ b/mypyc/irbuild/builder.py @@ -24,6 +24,9 @@ ARG_POS, GDEF, LDEF, + PARAM_SPEC_KIND, + TYPE_VAR_KIND, + TYPE_VAR_TUPLE_KIND, ArgKind, CallExpr, Decorator, @@ -44,6 +47,7 @@ TupleExpr, TypeAlias, TypeInfo, + TypeParam, UnaryExpr, Var, ) @@ -69,6 +73,7 @@ from mypyc.ir.func_ir import INVALID_FUNC_DEF, FuncDecl, FuncIR, FuncSignature, RuntimeArg from mypyc.ir.ops import ( NAMESPACE_MODULE, + NAMESPACE_TYPE_VAR, Assign, BasicBlock, Branch, @@ -179,6 +184,7 @@ def __init__( self.function_names: set[tuple[str | None, str]] = set() self.classes: list[ClassIR] = [] self.final_names: list[tuple[str, RType]] = [] + self.type_var_names: list[str] = [] self.callable_class_names: set[str] = set() self.options = options @@ -541,6 +547,21 @@ def load_final_static( error_msg=f'value for final name "{error_name}" was not set', ) + def init_type_var(self, value: Value, name: str, line: int) -> None: + unique_name = name + "___" + str(line) + self.type_var_names.append(unique_name) + self.add(InitStatic(value, unique_name, self.module_name, namespace=NAMESPACE_TYPE_VAR)) + + def load_type_var(self, name: str, line: int) -> Value: + return self.add( + LoadStatic( + object_rprimitive, + name + "___" + str(line), + self.module_name, + namespace=NAMESPACE_TYPE_VAR, + ) + ) + def load_literal_value(self, val: int | str | bytes | float | complex | bool) -> Value: """Load value of a final name, class-level attribute, or constant folded expression.""" if isinstance(val, bool): @@ -1392,3 +1413,45 @@ def get_call_target_fullname(ref: RefExpr) -> str: if isinstance(target, Instance): return target.type.fullname return ref.fullname + + +def create_type_params( + builder: IRBuilder, typing_mod: Value, type_args: list[TypeParam], line: int +) -> list[Value]: + """Create objects representing various kinds of Python 3.12 type parameters. + + The "typing_mod" argument is the "_typing" module object. The type objects + are looked up from it. + + The returned list has one item for each "type_args" item, in the same order. + Each item is either a TypeVar, TypeVarTuple or ParamSpec instance. 
+ """ + tvs = [] + type_var_imported: Value | None = None + for type_param in type_args: + if type_param.kind == TYPE_VAR_KIND: + if type_var_imported: + # Reuse previously imported value as a minor optimization + tvt = type_var_imported + else: + tvt = builder.py_get_attr(typing_mod, "TypeVar", line) + type_var_imported = tvt + elif type_param.kind == TYPE_VAR_TUPLE_KIND: + tvt = builder.py_get_attr(typing_mod, "TypeVarTuple", line) + else: + assert type_param.kind == PARAM_SPEC_KIND + tvt = builder.py_get_attr(typing_mod, "ParamSpec", line) + if type_param.kind != TYPE_VAR_TUPLE_KIND: + # To match runtime semantics, pass infer_variance=True + tv = builder.py_call( + tvt, + [builder.load_str(type_param.name), builder.true()], + line, + arg_kinds=[ARG_POS, ARG_NAMED], + arg_names=[None, "infer_variance"], + ) + else: + tv = builder.py_call(tvt, [builder.load_str(type_param.name)], line) + builder.init_type_var(tv, type_param.name, line) + tvs.append(tv) + return tvs diff --git a/mypyc/irbuild/classdef.py b/mypyc/irbuild/classdef.py index 3f6ec0f33822e..2152da099e819 100644 --- a/mypyc/irbuild/classdef.py +++ b/mypyc/irbuild/classdef.py @@ -7,6 +7,7 @@ from typing import Callable, Final from mypy.nodes import ( + TYPE_VAR_TUPLE_KIND, AssignmentStmt, CallExpr, ClassDef, @@ -22,6 +23,7 @@ StrExpr, TempNode, TypeInfo, + TypeParam, is_class_var, ) from mypy.types import ENUM_REMOVED_PROPS, Instance, RawExpressionType, get_proper_type @@ -53,7 +55,7 @@ is_optional_type, object_rprimitive, ) -from mypyc.irbuild.builder import IRBuilder +from mypyc.irbuild.builder import IRBuilder, create_type_params from mypyc.irbuild.function import ( gen_property_getter_ir, gen_property_setter_ir, @@ -63,9 +65,16 @@ ) from mypyc.irbuild.util import dataclass_type, get_func_def, is_constant, is_dataclass_decorator from mypyc.primitives.dict_ops import dict_new_op, dict_set_item_op -from mypyc.primitives.generic_ops import py_hasattr_op, py_setattr_op +from mypyc.primitives.generic_ops import ( + iter_op, + next_op, + py_get_item_op, + py_hasattr_op, + py_setattr_op, +) from mypyc.primitives.misc_ops import ( dataclass_sleight_of_hand, + import_op, not_implemented_op, py_calc_meta_op, pytype_from_template_op, @@ -405,8 +414,14 @@ def get_type_annotation(self, stmt: AssignmentStmt) -> TypeInfo | None: def allocate_class(builder: IRBuilder, cdef: ClassDef) -> Value: # OK AND NOW THE FUN PART base_exprs = cdef.base_type_exprs + cdef.removed_base_type_exprs - if base_exprs: - bases = [builder.accept(x) for x in base_exprs] + new_style_type_args = cdef.type_args + if new_style_type_args: + bases = [make_generic_base_class(builder, cdef.fullname, new_style_type_args, cdef.line)] + else: + bases = [] + + if base_exprs or new_style_type_args: + bases.extend([builder.accept(x) for x in base_exprs]) tp_bases = builder.new_tuple(bases, cdef.line) else: tp_bases = builder.add(LoadErrorValue(object_rprimitive, is_borrowed=True)) @@ -453,6 +468,30 @@ def allocate_class(builder: IRBuilder, cdef: ClassDef) -> Value: return tp +def make_generic_base_class( + builder: IRBuilder, fullname: str, type_args: list[TypeParam], line: int +) -> Value: + """Construct Generic[...] 
base class object for a new-style generic class (Python 3.12).""" + mod = builder.call_c(import_op, [builder.load_str("_typing")], line) + tvs = create_type_params(builder, mod, type_args, line) + args = [] + for tv, type_param in zip(tvs, type_args): + if type_param.kind == TYPE_VAR_TUPLE_KIND: + # Evaluate *Ts for a TypeVarTuple + it = builder.call_c(iter_op, [tv], line) + tv = builder.call_c(next_op, [it], line) + args.append(tv) + + gent = builder.py_get_attr(mod, "Generic", line) + if len(args) == 1: + arg = args[0] + else: + arg = builder.new_tuple(args, line) + + base = builder.call_c(py_get_item_op, [gent, arg], line) + return base + + # Mypy uses these internally as base classes of TypedDict classes. These are # lies and don't have any runtime equivalent. MAGIC_TYPED_DICT_CLASSES: Final[tuple[str, ...]] = ( diff --git a/mypyc/irbuild/expression.py b/mypyc/irbuild/expression.py index a16faf6cd7d75..8d7c089e20cd9 100644 --- a/mypyc/irbuild/expression.py +++ b/mypyc/irbuild/expression.py @@ -44,6 +44,7 @@ TupleExpr, TypeApplication, TypeInfo, + TypeVarLikeExpr, UnaryExpr, Var, ) @@ -106,6 +107,10 @@ def transform_name_expr(builder: IRBuilder, expr: NameExpr) -> Value: + if isinstance(expr.node, TypeVarLikeExpr) and expr.node.is_new_style: + # Reference to Python 3.12 implicit TypeVar/TupleVarTuple/... object. + # These are stored in C statics and not visible in Python namespaces. + return builder.load_type_var(expr.node.name, expr.node.line) if expr.node is None: builder.add( RaiseStandardError( diff --git a/mypyc/irbuild/main.py b/mypyc/irbuild/main.py index 85b905393af1a..15928d939cbf6 100644 --- a/mypyc/irbuild/main.py +++ b/mypyc/irbuild/main.py @@ -99,6 +99,7 @@ def build_ir( builder.functions, builder.classes, builder.final_names, + builder.type_var_names, ) result[module.fullname] = module_ir class_irs.extend(builder.classes) diff --git a/mypyc/irbuild/statement.py b/mypyc/irbuild/statement.py index 2c17eb2bb14d0..4d828b1b9d823 100644 --- a/mypyc/irbuild/statement.py +++ b/mypyc/irbuild/statement.py @@ -12,6 +12,8 @@ from typing import Callable, Sequence from mypy.nodes import ( + ARG_NAMED, + ARG_POS, AssertStmt, AssignmentStmt, AwaitExpr, @@ -37,6 +39,7 @@ TempNode, TryStmt, TupleExpr, + TypeAliasStmt, WhileStmt, WithStmt, YieldExpr, @@ -74,7 +77,7 @@ object_rprimitive, ) from mypyc.irbuild.ast_helpers import is_borrow_friendly_expr, process_conditional -from mypyc.irbuild.builder import IRBuilder, int_borrow_friendly_op +from mypyc.irbuild.builder import IRBuilder, create_type_params, int_borrow_friendly_op from mypyc.irbuild.for_helpers import for_loop_helper from mypyc.irbuild.generator import add_raise_exception_blocks_to_generator_class from mypyc.irbuild.nonlocalcontrol import ( @@ -105,7 +108,9 @@ coro_op, import_from_many_op, import_many_op, + import_op, send_op, + set_type_alias_compute_function_op, type_op, yield_from_except_op, ) @@ -1015,3 +1020,30 @@ def transform_await_expr(builder: IRBuilder, o: AwaitExpr) -> Value: def transform_match_stmt(builder: IRBuilder, m: MatchStmt) -> None: m.accept(MatchVisitor(builder, m)) + + +def transform_type_alias_stmt(builder: IRBuilder, s: TypeAliasStmt) -> None: + line = s.line + # Use "_typing" to avoid importing "typing", as the latter can be expensive. + # "_typing" includes everything we need here. 
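
A rough Python-level sketch of the ``Generic[...]`` base that ``make_generic_base_class`` above constructs for a new-style generic class. The class names are hypothetical, and the ``iter``/``next`` calls in the helper correspond to evaluating ``*Ts`` for a TypeVarTuple parameter:

.. code-block:: python

    # Illustrative only, assuming Python 3.12; not part of this patch.
    from typing import Generic, TypeVar, TypeVarTuple

    T = TypeVar("T", infer_variance=True)
    Ts = TypeVarTuple("Ts")

    box_base = Generic[T]     # base used for "class Box[T]: ..."
    row_base = Generic[*Ts]   # base used for "class Row[*Ts]: ...", i.e. the
                              # TypeVarTuple is unpacked before subscripting
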
+ mod = builder.call_c(import_op, [builder.load_str("_typing")], line) + type_params = create_type_params(builder, mod, s.type_args, s.line) + + type_alias_type = builder.py_get_attr(mod, "TypeAliasType", line) + args = [builder.load_str(s.name.name), builder.none()] + arg_names: list[str | None] = [None, None] + arg_kinds = [ARG_POS, ARG_POS] + if s.type_args: + args.append(builder.new_tuple(type_params, line)) + arg_names.append("type_params") + arg_kinds.append(ARG_NAMED) + alias = builder.py_call(type_alias_type, args, line, arg_names=arg_names, arg_kinds=arg_kinds) + + # Use primitive to set function used to lazily compute type alias type value. + # The value needs to be lazily computed to match Python runtime behavior, but + # Python public APIs don't support this, so we use a C primitive. + compute_fn = s.value.accept(builder.visitor) + builder.builder.primitive_op(set_type_alias_compute_function_op, [alias, compute_fn], line) + + target = builder.get_assignment_target(s.name) + builder.assign(target, alias, line) diff --git a/mypyc/irbuild/visitor.py b/mypyc/irbuild/visitor.py index e7256f036e4cb..05a033c3e6ad4 100644 --- a/mypyc/irbuild/visitor.py +++ b/mypyc/irbuild/visitor.py @@ -137,6 +137,7 @@ transform_raise_stmt, transform_return_stmt, transform_try_stmt, + transform_type_alias_stmt, transform_while_stmt, transform_with_stmt, transform_yield_expr, @@ -251,7 +252,7 @@ def visit_match_stmt(self, stmt: MatchStmt) -> None: transform_match_stmt(self.builder, stmt) def visit_type_alias_stmt(self, stmt: TypeAliasStmt) -> None: - self.bail('The "type" statement is not yet supported by mypyc', stmt.line) + transform_type_alias_stmt(self.builder, stmt) # Expressions diff --git a/mypyc/lib-rt/CPy.h b/mypyc/lib-rt/CPy.h index 1a03f049ecb00..2ec04e4c5b5cc 100644 --- a/mypyc/lib-rt/CPy.h +++ b/mypyc/lib-rt/CPy.h @@ -120,46 +120,43 @@ static inline size_t CPy_FindAttrOffset(PyTypeObject *trait, CPyVTableItem *vtab CPyTagged CPyTagged_FromSsize_t(Py_ssize_t value); CPyTagged CPyTagged_FromVoidPtr(void *ptr); CPyTagged CPyTagged_FromInt64(int64_t value); -CPyTagged CPyTagged_FromObject(PyObject *object); -CPyTagged CPyTagged_StealFromObject(PyObject *object); -CPyTagged CPyTagged_BorrowFromObject(PyObject *object); PyObject *CPyTagged_AsObject(CPyTagged x); PyObject *CPyTagged_StealAsObject(CPyTagged x); Py_ssize_t CPyTagged_AsSsize_t(CPyTagged x); void CPyTagged_IncRef(CPyTagged x); void CPyTagged_DecRef(CPyTagged x); void CPyTagged_XDecRef(CPyTagged x); -CPyTagged CPyTagged_Negate(CPyTagged num); -CPyTagged CPyTagged_Invert(CPyTagged num); -CPyTagged CPyTagged_Add(CPyTagged left, CPyTagged right); -CPyTagged CPyTagged_Subtract(CPyTagged left, CPyTagged right); -CPyTagged CPyTagged_Multiply(CPyTagged left, CPyTagged right); -CPyTagged CPyTagged_FloorDivide(CPyTagged left, CPyTagged right); -CPyTagged CPyTagged_Remainder(CPyTagged left, CPyTagged right); -CPyTagged CPyTagged_And(CPyTagged left, CPyTagged right); -CPyTagged CPyTagged_Or(CPyTagged left, CPyTagged right); -CPyTagged CPyTagged_Xor(CPyTagged left, CPyTagged right); -CPyTagged CPyTagged_Rshift(CPyTagged left, CPyTagged right); -CPyTagged CPyTagged_Lshift(CPyTagged left, CPyTagged right); + bool CPyTagged_IsEq_(CPyTagged left, CPyTagged right); bool CPyTagged_IsLt_(CPyTagged left, CPyTagged right); +CPyTagged CPyTagged_Negate_(CPyTagged num); +CPyTagged CPyTagged_Invert_(CPyTagged num); +CPyTagged CPyTagged_Add_(CPyTagged left, CPyTagged right); +CPyTagged CPyTagged_Subtract_(CPyTagged left, CPyTagged right); +CPyTagged 
CPyTagged_Multiply_(CPyTagged left, CPyTagged right); +CPyTagged CPyTagged_FloorDivide_(CPyTagged left, CPyTagged right); +CPyTagged CPyTagged_Remainder_(CPyTagged left, CPyTagged right); +CPyTagged CPyTagged_BitwiseLongOp_(CPyTagged a, CPyTagged b, char op); +CPyTagged CPyTagged_Rshift_(CPyTagged left, CPyTagged right); +CPyTagged CPyTagged_Lshift_(CPyTagged left, CPyTagged right); + PyObject *CPyTagged_Str(CPyTagged n); CPyTagged CPyTagged_FromFloat(double f); PyObject *CPyLong_FromStrWithBase(PyObject *o, CPyTagged base); PyObject *CPyLong_FromStr(PyObject *o); PyObject *CPyBool_Str(bool b); -int64_t CPyLong_AsInt64(PyObject *o); +int64_t CPyLong_AsInt64_(PyObject *o); int64_t CPyInt64_Divide(int64_t x, int64_t y); int64_t CPyInt64_Remainder(int64_t x, int64_t y); -int32_t CPyLong_AsInt32(PyObject *o); +int32_t CPyLong_AsInt32_(PyObject *o); int32_t CPyInt32_Divide(int32_t x, int32_t y); int32_t CPyInt32_Remainder(int32_t x, int32_t y); void CPyInt32_Overflow(void); -int16_t CPyLong_AsInt16(PyObject *o); +int16_t CPyLong_AsInt16_(PyObject *o); int16_t CPyInt16_Divide(int16_t x, int16_t y); int16_t CPyInt16_Remainder(int16_t x, int16_t y); void CPyInt16_Overflow(void); -uint8_t CPyLong_AsUInt8(PyObject *o); +uint8_t CPyLong_AsUInt8_(PyObject *o); void CPyUInt8_Overflow(void); double CPyTagged_TrueDivide(CPyTagged x, CPyTagged y); @@ -199,6 +196,41 @@ static inline PyObject *CPyTagged_LongAsObject(CPyTagged x) { return (PyObject *)(x & ~CPY_INT_TAG); } +static inline CPyTagged CPyTagged_FromObject(PyObject *object) { + int overflow; + // The overflow check knows about CPyTagged's width + Py_ssize_t value = CPyLong_AsSsize_tAndOverflow(object, &overflow); + if (unlikely(overflow != 0)) { + Py_INCREF(object); + return ((CPyTagged)object) | CPY_INT_TAG; + } else { + return value << 1; + } +} + +static inline CPyTagged CPyTagged_StealFromObject(PyObject *object) { + int overflow; + // The overflow check knows about CPyTagged's width + Py_ssize_t value = CPyLong_AsSsize_tAndOverflow(object, &overflow); + if (unlikely(overflow != 0)) { + return ((CPyTagged)object) | CPY_INT_TAG; + } else { + Py_DECREF(object); + return value << 1; + } +} + +static inline CPyTagged CPyTagged_BorrowFromObject(PyObject *object) { + int overflow; + // The overflow check knows about CPyTagged's width + Py_ssize_t value = CPyLong_AsSsize_tAndOverflow(object, &overflow); + if (unlikely(overflow != 0)) { + return ((CPyTagged)object) | CPY_INT_TAG; + } else { + return value << 1; + } +} + static inline bool CPyTagged_TooBig(Py_ssize_t value) { // Micro-optimized for the common case where it fits. 
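
A small Python model of the tagged integer representation handled by the conversions just inlined above: a value that fits is stored shifted left by one with the low tag bit clear, and anything larger stays a ``PyObject *`` pointer with the tag bit set. The bound and helper names are illustrative assumptions for a 64-bit build:

.. code-block:: python

    # Hypothetical model of the CPyTagged encoding; not part of this patch.
    CPY_INT_TAG = 1                 # low bit set: value is a boxed PyObject pointer

    def tagged_from_int(n: int, object_address: int) -> int:
        if -2**62 <= n < 2**62:     # fits after the shift (cf. CPY_TAGGED_MAX)
            return n << 1           # "short" integer, tag bit clear
        return object_address | CPY_INT_TAG   # "long" integer, tagged pointer

    def is_short(tagged: int) -> bool:
        return tagged & CPY_INT_TAG == 0
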
return (size_t)value > CPY_TAGGED_MAX @@ -286,6 +318,245 @@ static inline bool CPyTagged_IsLe(CPyTagged left, CPyTagged right) { } } +static inline int64_t CPyLong_AsInt64(PyObject *o) { + if (likely(PyLong_Check(o))) { + PyLongObject *lobj = (PyLongObject *)o; + Py_ssize_t size = Py_SIZE(lobj); + if (likely(size == 1)) { + // Fast path + return CPY_LONG_DIGIT(lobj, 0); + } else if (likely(size == 0)) { + return 0; + } + } + // Slow path + return CPyLong_AsInt64_(o); +} + +static inline int32_t CPyLong_AsInt32(PyObject *o) { + if (likely(PyLong_Check(o))) { + #if CPY_3_12_FEATURES + PyLongObject *lobj = (PyLongObject *)o; + size_t tag = CPY_LONG_TAG(lobj); + if (likely(tag == (1 << CPY_NON_SIZE_BITS))) { + // Fast path + return CPY_LONG_DIGIT(lobj, 0); + } else if (likely(tag == CPY_SIGN_ZERO)) { + return 0; + } + #else + PyLongObject *lobj = (PyLongObject *)o; + Py_ssize_t size = lobj->ob_base.ob_size; + if (likely(size == 1)) { + // Fast path + return CPY_LONG_DIGIT(lobj, 0); + } else if (likely(size == 0)) { + return 0; + } + #endif + } + // Slow path + return CPyLong_AsInt32_(o); +} + +static inline int16_t CPyLong_AsInt16(PyObject *o) { + if (likely(PyLong_Check(o))) { + #if CPY_3_12_FEATURES + PyLongObject *lobj = (PyLongObject *)o; + size_t tag = CPY_LONG_TAG(lobj); + if (likely(tag == (1 << CPY_NON_SIZE_BITS))) { + // Fast path + digit x = CPY_LONG_DIGIT(lobj, 0); + if (x < 0x8000) + return x; + } else if (likely(tag == CPY_SIGN_ZERO)) { + return 0; + } + #else + PyLongObject *lobj = (PyLongObject *)o; + Py_ssize_t size = lobj->ob_base.ob_size; + if (likely(size == 1)) { + // Fast path + digit x = lobj->ob_digit[0]; + if (x < 0x8000) + return x; + } else if (likely(size == 0)) { + return 0; + } + #endif + } + // Slow path + return CPyLong_AsInt16_(o); +} + +static inline uint8_t CPyLong_AsUInt8(PyObject *o) { + if (likely(PyLong_Check(o))) { + #if CPY_3_12_FEATURES + PyLongObject *lobj = (PyLongObject *)o; + size_t tag = CPY_LONG_TAG(lobj); + if (likely(tag == (1 << CPY_NON_SIZE_BITS))) { + // Fast path + digit x = CPY_LONG_DIGIT(lobj, 0); + if (x < 256) + return x; + } else if (likely(tag == CPY_SIGN_ZERO)) { + return 0; + } + #else + PyLongObject *lobj = (PyLongObject *)o; + Py_ssize_t size = lobj->ob_base.ob_size; + if (likely(size == 1)) { + // Fast path + digit x = lobj->ob_digit[0]; + if (x < 256) + return x; + } else if (likely(size == 0)) { + return 0; + } + #endif + } + // Slow path + return CPyLong_AsUInt8_(o); +} + +static inline CPyTagged CPyTagged_Negate(CPyTagged num) { + if (likely(CPyTagged_CheckShort(num) + && num != (CPyTagged) ((Py_ssize_t)1 << (CPY_INT_BITS - 1)))) { + // The only possibility of an overflow error happening when negating a short is if we + // attempt to negate the most negative number. + return -num; + } + return CPyTagged_Negate_(num); +} + +static inline CPyTagged CPyTagged_Add(CPyTagged left, CPyTagged right) { + // TODO: Use clang/gcc extension __builtin_saddll_overflow instead. + if (likely(CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right))) { + CPyTagged sum = left + right; + if (likely(!CPyTagged_IsAddOverflow(sum, left, right))) { + return sum; + } + } + return CPyTagged_Add_(left, right); +} + +static inline CPyTagged CPyTagged_Subtract(CPyTagged left, CPyTagged right) { + // TODO: Use clang/gcc extension __builtin_saddll_overflow instead. 
+ if (likely(CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right))) { + CPyTagged diff = left - right; + if (likely(!CPyTagged_IsSubtractOverflow(diff, left, right))) { + return diff; + } + } + return CPyTagged_Subtract_(left, right); +} + +static inline CPyTagged CPyTagged_Multiply(CPyTagged left, CPyTagged right) { + // TODO: Consider using some clang/gcc extension to check for overflow + if (CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right)) { + if (!CPyTagged_IsMultiplyOverflow(left, right)) { + return left * CPyTagged_ShortAsSsize_t(right); + } + } + return CPyTagged_Multiply_(left, right); +} + +static inline CPyTagged CPyTagged_FloorDivide(CPyTagged left, CPyTagged right) { + if (CPyTagged_CheckShort(left) + && CPyTagged_CheckShort(right) + && !CPyTagged_MaybeFloorDivideFault(left, right)) { + Py_ssize_t result = CPyTagged_ShortAsSsize_t(left) / CPyTagged_ShortAsSsize_t(right); + if (((Py_ssize_t)left < 0) != (((Py_ssize_t)right) < 0)) { + if (result * right != left) { + // Round down + result--; + } + } + return result << 1; + } + return CPyTagged_FloorDivide_(left, right); +} + +static inline CPyTagged CPyTagged_Remainder(CPyTagged left, CPyTagged right) { + if (CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right) + && !CPyTagged_MaybeRemainderFault(left, right)) { + Py_ssize_t result = (Py_ssize_t)left % (Py_ssize_t)right; + if (((Py_ssize_t)right < 0) != ((Py_ssize_t)left < 0) && result != 0) { + result += right; + } + return result; + } + return CPyTagged_Remainder_(left, right); +} + +// Bitwise '~' +static inline CPyTagged CPyTagged_Invert(CPyTagged num) { + if (likely(CPyTagged_CheckShort(num) && num != CPY_TAGGED_ABS_MIN)) { + return ~num & ~CPY_INT_TAG; + } + return CPyTagged_Invert_(num); +} + +// Bitwise '&' +static inline CPyTagged CPyTagged_And(CPyTagged left, CPyTagged right) { + if (likely(CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right))) { + return left & right; + } + return CPyTagged_BitwiseLongOp_(left, right, '&'); +} + +// Bitwise '|' +static inline CPyTagged CPyTagged_Or(CPyTagged left, CPyTagged right) { + if (likely(CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right))) { + return left | right; + } + return CPyTagged_BitwiseLongOp_(left, right, '|'); +} + +// Bitwise '^' +static inline CPyTagged CPyTagged_Xor(CPyTagged left, CPyTagged right) { + if (likely(CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right))) { + return left ^ right; + } + return CPyTagged_BitwiseLongOp_(left, right, '^'); +} + +// Bitwise '>>' +static inline CPyTagged CPyTagged_Rshift(CPyTagged left, CPyTagged right) { + if (likely(CPyTagged_CheckShort(left) + && CPyTagged_CheckShort(right) + && (Py_ssize_t)right >= 0)) { + CPyTagged count = CPyTagged_ShortAsSsize_t(right); + if (unlikely(count >= CPY_INT_BITS)) { + if ((Py_ssize_t)left >= 0) { + return 0; + } else { + return CPyTagged_ShortFromInt(-1); + } + } + return ((Py_ssize_t)left >> count) & ~CPY_INT_TAG; + } + return CPyTagged_Rshift_(left, right); +} + +static inline bool IsShortLshiftOverflow(Py_ssize_t short_int, Py_ssize_t shift) { + return ((Py_ssize_t)(short_int << shift) >> shift) != short_int; +} + +// Bitwise '<<' +static inline CPyTagged CPyTagged_Lshift(CPyTagged left, CPyTagged right) { + if (likely(CPyTagged_CheckShort(left) + && CPyTagged_CheckShort(right) + && (Py_ssize_t)right >= 0 + && right < CPY_INT_BITS * 2)) { + CPyTagged shift = CPyTagged_ShortAsSsize_t(right); + if (!IsShortLshiftOverflow(left, shift)) + // Short integers, no overflow + return left << shift; + } + return 
CPyTagged_Lshift_(left, right); +} + // Float operations @@ -630,6 +901,7 @@ PyObject *CPySingledispatch_RegisterFunction(PyObject *singledispatch_func, PyOb PyObject *CPy_GetAIter(PyObject *obj); PyObject *CPy_GetANext(PyObject *aiter); +void CPy_SetTypeAliasTypeComputeFunction(PyObject *alias, PyObject *compute_value); #ifdef __cplusplus } diff --git a/mypyc/lib-rt/int_ops.c b/mypyc/lib-rt/int_ops.c index b57d88c6ac934..9b5d4ef65fb19 100644 --- a/mypyc/lib-rt/int_ops.c +++ b/mypyc/lib-rt/int_ops.c @@ -44,41 +44,6 @@ CPyTagged CPyTagged_FromInt64(int64_t value) { } } -CPyTagged CPyTagged_FromObject(PyObject *object) { - int overflow; - // The overflow check knows about CPyTagged's width - Py_ssize_t value = CPyLong_AsSsize_tAndOverflow(object, &overflow); - if (unlikely(overflow != 0)) { - Py_INCREF(object); - return ((CPyTagged)object) | CPY_INT_TAG; - } else { - return value << 1; - } -} - -CPyTagged CPyTagged_StealFromObject(PyObject *object) { - int overflow; - // The overflow check knows about CPyTagged's width - Py_ssize_t value = CPyLong_AsSsize_tAndOverflow(object, &overflow); - if (unlikely(overflow != 0)) { - return ((CPyTagged)object) | CPY_INT_TAG; - } else { - Py_DECREF(object); - return value << 1; - } -} - -CPyTagged CPyTagged_BorrowFromObject(PyObject *object) { - int overflow; - // The overflow check knows about CPyTagged's width - Py_ssize_t value = CPyLong_AsSsize_tAndOverflow(object, &overflow); - if (unlikely(overflow != 0)) { - return ((CPyTagged)object) | CPY_INT_TAG; - } else { - return value << 1; - } -} - PyObject *CPyTagged_AsObject(CPyTagged x) { PyObject *value; if (unlikely(CPyTagged_CheckLong(x))) { @@ -135,13 +100,8 @@ void CPyTagged_XDecRef(CPyTagged x) { } } -CPyTagged CPyTagged_Negate(CPyTagged num) { - if (CPyTagged_CheckShort(num) - && num != (CPyTagged) ((Py_ssize_t)1 << (CPY_INT_BITS - 1))) { - // The only possibility of an overflow error happening when negating a short is if we - // attempt to negate the most negative number. - return -num; - } +// Tagged int negation slow path, where the result may be a long integer +CPyTagged CPyTagged_Negate_(CPyTagged num) { PyObject *num_obj = CPyTagged_AsObject(num); PyObject *result = PyNumber_Negative(num_obj); if (result == NULL) { @@ -151,14 +111,8 @@ CPyTagged CPyTagged_Negate(CPyTagged num) { return CPyTagged_StealFromObject(result); } -CPyTagged CPyTagged_Add(CPyTagged left, CPyTagged right) { - // TODO: Use clang/gcc extension __builtin_saddll_overflow instead. - if (likely(CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right))) { - CPyTagged sum = left + right; - if (likely(!CPyTagged_IsAddOverflow(sum, left, right))) { - return sum; - } - } +// Tagged int addition slow path, where the result may be a long integer +CPyTagged CPyTagged_Add_(CPyTagged left, CPyTagged right) { PyObject *left_obj = CPyTagged_AsObject(left); PyObject *right_obj = CPyTagged_AsObject(right); PyObject *result = PyNumber_Add(left_obj, right_obj); @@ -170,14 +124,8 @@ CPyTagged CPyTagged_Add(CPyTagged left, CPyTagged right) { return CPyTagged_StealFromObject(result); } -CPyTagged CPyTagged_Subtract(CPyTagged left, CPyTagged right) { - // TODO: Use clang/gcc extension __builtin_saddll_overflow instead. 
- if (likely(CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right))) { - CPyTagged diff = left - right; - if (likely(!CPyTagged_IsSubtractOverflow(diff, left, right))) { - return diff; - } - } +// Tagged int subraction slow path, where the result may be a long integer +CPyTagged CPyTagged_Subtract_(CPyTagged left, CPyTagged right) { PyObject *left_obj = CPyTagged_AsObject(left); PyObject *right_obj = CPyTagged_AsObject(right); PyObject *result = PyNumber_Subtract(left_obj, right_obj); @@ -189,13 +137,8 @@ CPyTagged CPyTagged_Subtract(CPyTagged left, CPyTagged right) { return CPyTagged_StealFromObject(result); } -CPyTagged CPyTagged_Multiply(CPyTagged left, CPyTagged right) { - // TODO: Consider using some clang/gcc extension - if (CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right)) { - if (!CPyTagged_IsMultiplyOverflow(left, right)) { - return left * CPyTagged_ShortAsSsize_t(right); - } - } +// Tagged int multiplication slow path, where the result may be a long integer +CPyTagged CPyTagged_Multiply_(CPyTagged left, CPyTagged right) { PyObject *left_obj = CPyTagged_AsObject(left); PyObject *right_obj = CPyTagged_AsObject(right); PyObject *result = PyNumber_Multiply(left_obj, right_obj); @@ -207,19 +150,8 @@ CPyTagged CPyTagged_Multiply(CPyTagged left, CPyTagged right) { return CPyTagged_StealFromObject(result); } -CPyTagged CPyTagged_FloorDivide(CPyTagged left, CPyTagged right) { - if (CPyTagged_CheckShort(left) - && CPyTagged_CheckShort(right) - && !CPyTagged_MaybeFloorDivideFault(left, right)) { - Py_ssize_t result = CPyTagged_ShortAsSsize_t(left) / CPyTagged_ShortAsSsize_t(right); - if (((Py_ssize_t)left < 0) != (((Py_ssize_t)right) < 0)) { - if (result * right != left) { - // Round down - result--; - } - } - return result << 1; - } +// Tagged int // slow path, where the result may be a long integer (or raise) +CPyTagged CPyTagged_FloorDivide_(CPyTagged left, CPyTagged right) { PyObject *left_obj = CPyTagged_AsObject(left); PyObject *right_obj = CPyTagged_AsObject(right); PyObject *result = PyNumber_FloorDivide(left_obj, right_obj); @@ -233,15 +165,8 @@ CPyTagged CPyTagged_FloorDivide(CPyTagged left, CPyTagged right) { } } -CPyTagged CPyTagged_Remainder(CPyTagged left, CPyTagged right) { - if (CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right) - && !CPyTagged_MaybeRemainderFault(left, right)) { - Py_ssize_t result = (Py_ssize_t)left % (Py_ssize_t)right; - if (((Py_ssize_t)right < 0) != ((Py_ssize_t)left < 0) && result != 0) { - result += right; - } - return result; - } +// Tagged int % slow path, where the result may be a long integer (or raise) +CPyTagged CPyTagged_Remainder_(CPyTagged left, CPyTagged right) { PyObject *left_obj = CPyTagged_AsObject(left); PyObject *right_obj = CPyTagged_AsObject(right); PyObject *result = PyNumber_Remainder(left_obj, right_obj); @@ -368,7 +293,7 @@ static digit *GetIntDigits(CPyTagged n, Py_ssize_t *size, digit *buf) { // Shared implementation of bitwise '&', '|' and '^' (specified by op) for at least // one long operand. This is somewhat optimized for performance. -static CPyTagged BitwiseLongOp(CPyTagged a, CPyTagged b, char op) { +CPyTagged CPyTagged_BitwiseLongOp_(CPyTagged a, CPyTagged b, char op) { // Directly access the digits, as there is no fast C API function for this. 
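
The arithmetic refactoring above follows one pattern: a ``static inline`` fast path in the header handles two short operands without overflow, and only the remaining cases call the ``_``-suffixed slow path in ``int_ops.c``, which boxes the operands and defers to the generic ``PyNumber_*`` API. Because short tagged values have a zero tag bit, the bitwise fast paths can operate on the tagged words directly. A rough Python sketch of the shape of ``CPyTagged_Add`` (names and bounds are illustrative assumptions):

.. code-block:: python

    # Hypothetical sketch of the fast/slow split; not part of this patch.
    CPY_INT_TAG = 1
    WORD_MIN, WORD_MAX = -2**63, 2**63 - 1

    def tagged_add(left: int, right: int) -> int:
        if left & CPY_INT_TAG == 0 and right & CPY_INT_TAG == 0:
            total = left + right
            if WORD_MIN <= total <= WORD_MAX:    # cf. CPyTagged_IsAddOverflow
                return total                     # fast path: result is still short
        return tagged_add_slow(left, right)

    def tagged_add_slow(left: int, right: int) -> int:
        # Stands in for CPyTagged_Add_: box both operands to PyObject and let
        # PyNumber_Add produce an arbitrary-precision result.
        raise NotImplementedError
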
digit abuf[3]; digit bbuf[3]; @@ -419,89 +344,34 @@ static CPyTagged BitwiseLongOp(CPyTagged a, CPyTagged b, char op) { return CPyTagged_StealFromObject((PyObject *)r); } -// Bitwise '&' -CPyTagged CPyTagged_And(CPyTagged left, CPyTagged right) { - if (likely(CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right))) { - return left & right; - } - return BitwiseLongOp(left, right, '&'); -} - -// Bitwise '|' -CPyTagged CPyTagged_Or(CPyTagged left, CPyTagged right) { - if (likely(CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right))) { - return left | right; - } - return BitwiseLongOp(left, right, '|'); -} - -// Bitwise '^' -CPyTagged CPyTagged_Xor(CPyTagged left, CPyTagged right) { - if (likely(CPyTagged_CheckShort(left) && CPyTagged_CheckShort(right))) { - return left ^ right; - } - return BitwiseLongOp(left, right, '^'); -} - -// Bitwise '~' -CPyTagged CPyTagged_Invert(CPyTagged num) { - if (likely(CPyTagged_CheckShort(num) && num != CPY_TAGGED_ABS_MIN)) { - return ~num & ~CPY_INT_TAG; - } else { - PyObject *obj = CPyTagged_AsObject(num); - PyObject *result = PyNumber_Invert(obj); - if (unlikely(result == NULL)) { - CPyError_OutOfMemory(); - } - Py_DECREF(obj); - return CPyTagged_StealFromObject(result); +// Bitwise '~' slow path +CPyTagged CPyTagged_Invert_(CPyTagged num) { + PyObject *obj = CPyTagged_AsObject(num); + PyObject *result = PyNumber_Invert(obj); + if (unlikely(result == NULL)) { + CPyError_OutOfMemory(); } + Py_DECREF(obj); + return CPyTagged_StealFromObject(result); } -// Bitwise '>>' -CPyTagged CPyTagged_Rshift(CPyTagged left, CPyTagged right) { - if (likely(CPyTagged_CheckShort(left) - && CPyTagged_CheckShort(right) - && (Py_ssize_t)right >= 0)) { - CPyTagged count = CPyTagged_ShortAsSsize_t(right); - if (unlikely(count >= CPY_INT_BITS)) { - if ((Py_ssize_t)left >= 0) { - return 0; - } else { - return CPyTagged_ShortFromInt(-1); - } - } - return ((Py_ssize_t)left >> count) & ~CPY_INT_TAG; - } else { - // Long integer or negative shift -- use generic op - PyObject *lobj = CPyTagged_AsObject(left); - PyObject *robj = CPyTagged_AsObject(right); - PyObject *result = PyNumber_Rshift(lobj, robj); - Py_DECREF(lobj); - Py_DECREF(robj); - if (result == NULL) { - // Propagate error (could be negative shift count) - return CPY_INT_TAG; - } - return CPyTagged_StealFromObject(result); +// Bitwise '>>' slow path +CPyTagged CPyTagged_Rshift_(CPyTagged left, CPyTagged right) { + // Long integer or negative shift -- use generic op + PyObject *lobj = CPyTagged_AsObject(left); + PyObject *robj = CPyTagged_AsObject(right); + PyObject *result = PyNumber_Rshift(lobj, robj); + Py_DECREF(lobj); + Py_DECREF(robj); + if (result == NULL) { + // Propagate error (could be negative shift count) + return CPY_INT_TAG; } + return CPyTagged_StealFromObject(result); } -static inline bool IsShortLshiftOverflow(Py_ssize_t short_int, Py_ssize_t shift) { - return ((Py_ssize_t)(short_int << shift) >> shift) != short_int; -} - -// Bitwise '<<' -CPyTagged CPyTagged_Lshift(CPyTagged left, CPyTagged right) { - if (likely(CPyTagged_CheckShort(left) - && CPyTagged_CheckShort(right) - && (Py_ssize_t)right >= 0 - && right < CPY_INT_BITS * 2)) { - CPyTagged shift = CPyTagged_ShortAsSsize_t(right); - if (!IsShortLshiftOverflow(left, shift)) - // Short integers, no overflow - return left << shift; - } +// Bitwise '<<' slow path +CPyTagged CPyTagged_Lshift_(CPyTagged left, CPyTagged right) { // Long integer or out of range shift -- use generic op PyObject *lobj = CPyTagged_AsObject(left); PyObject *robj = 
CPyTagged_AsObject(right); @@ -515,18 +385,8 @@ CPyTagged CPyTagged_Lshift(CPyTagged left, CPyTagged right) { return CPyTagged_StealFromObject(result); } -int64_t CPyLong_AsInt64(PyObject *o) { - if (likely(PyLong_Check(o))) { - PyLongObject *lobj = (PyLongObject *)o; - Py_ssize_t size = Py_SIZE(lobj); - if (likely(size == 1)) { - // Fast path - return CPY_LONG_DIGIT(lobj, 0); - } else if (likely(size == 0)) { - return 0; - } - } - // Slow path +// i64 unboxing slow path +int64_t CPyLong_AsInt64_(PyObject *o) { int overflow; int64_t result = PyLong_AsLongLongAndOverflow(o, &overflow); if (result == -1) { @@ -574,29 +434,8 @@ int64_t CPyInt64_Remainder(int64_t x, int64_t y) { return d; } -int32_t CPyLong_AsInt32(PyObject *o) { - if (likely(PyLong_Check(o))) { - #if CPY_3_12_FEATURES - PyLongObject *lobj = (PyLongObject *)o; - size_t tag = CPY_LONG_TAG(lobj); - if (likely(tag == (1 << CPY_NON_SIZE_BITS))) { - // Fast path - return CPY_LONG_DIGIT(lobj, 0); - } else if (likely(tag == CPY_SIGN_ZERO)) { - return 0; - } - #else - PyLongObject *lobj = (PyLongObject *)o; - Py_ssize_t size = lobj->ob_base.ob_size; - if (likely(size == 1)) { - // Fast path - return CPY_LONG_DIGIT(lobj, 0); - } else if (likely(size == 0)) { - return 0; - } - #endif - } - // Slow path +// i32 unboxing slow path +int32_t CPyLong_AsInt32_(PyObject *o) { int overflow; long result = PyLong_AsLongAndOverflow(o, &overflow); if (result > 0x7fffffffLL || result < -0x80000000LL) { @@ -652,33 +491,8 @@ void CPyInt32_Overflow() { PyErr_SetString(PyExc_OverflowError, "int too large to convert to i32"); } -int16_t CPyLong_AsInt16(PyObject *o) { - if (likely(PyLong_Check(o))) { - #if CPY_3_12_FEATURES - PyLongObject *lobj = (PyLongObject *)o; - size_t tag = CPY_LONG_TAG(lobj); - if (likely(tag == (1 << CPY_NON_SIZE_BITS))) { - // Fast path - digit x = CPY_LONG_DIGIT(lobj, 0); - if (x < 0x8000) - return x; - } else if (likely(tag == CPY_SIGN_ZERO)) { - return 0; - } - #else - PyLongObject *lobj = (PyLongObject *)o; - Py_ssize_t size = lobj->ob_base.ob_size; - if (likely(size == 1)) { - // Fast path - digit x = lobj->ob_digit[0]; - if (x < 0x8000) - return x; - } else if (likely(size == 0)) { - return 0; - } - #endif - } - // Slow path +// i16 unboxing slow path +int16_t CPyLong_AsInt16_(PyObject *o) { int overflow; long result = PyLong_AsLongAndOverflow(o, &overflow); if (result > 0x7fff || result < -0x8000) { @@ -734,34 +548,8 @@ void CPyInt16_Overflow() { PyErr_SetString(PyExc_OverflowError, "int too large to convert to i16"); } - -uint8_t CPyLong_AsUInt8(PyObject *o) { - if (likely(PyLong_Check(o))) { - #if CPY_3_12_FEATURES - PyLongObject *lobj = (PyLongObject *)o; - size_t tag = CPY_LONG_TAG(lobj); - if (likely(tag == (1 << CPY_NON_SIZE_BITS))) { - // Fast path - digit x = CPY_LONG_DIGIT(lobj, 0); - if (x < 256) - return x; - } else if (likely(tag == CPY_SIGN_ZERO)) { - return 0; - } - #else - PyLongObject *lobj = (PyLongObject *)o; - Py_ssize_t size = lobj->ob_base.ob_size; - if (likely(size == 1)) { - // Fast path - digit x = lobj->ob_digit[0]; - if (x < 256) - return x; - } else if (likely(size == 0)) { - return 0; - } - #endif - } - // Slow path +// u8 unboxing slow path +uint8_t CPyLong_AsUInt8_(PyObject *o) { int overflow; long result = PyLong_AsLongAndOverflow(o, &overflow); if (result < 0 || result >= 256) { diff --git a/mypyc/lib-rt/misc_ops.c b/mypyc/lib-rt/misc_ops.c index f28eeb57e6467..803123d436a2e 100644 --- a/mypyc/lib-rt/misc_ops.c +++ b/mypyc/lib-rt/misc_ops.c @@ -940,3 +940,34 @@ PyObject 
*CPy_GetANext(PyObject *aiter) error: return NULL; } + +#ifdef CPY_3_12_FEATURES + +// Copied from Python 3.12.3, since this struct is internal to CPython. It defines +// the structure of typing.TypeAliasType objects. We need it since compute_value is +// not part of the public API, and we need to set it to match Python runtime semantics. +// +// IMPORTANT: This needs to be kept in sync with CPython! +typedef struct { + PyObject_HEAD + PyObject *name; + PyObject *type_params; + PyObject *compute_value; + PyObject *value; + PyObject *module; +} typealiasobject; + +void CPy_SetTypeAliasTypeComputeFunction(PyObject *alias, PyObject *compute_value) { + typealiasobject *obj = (typealiasobject *)alias; + if (obj->value != NULL) { + Py_DECREF(obj->value); + } + obj->value = NULL; + Py_INCREF(compute_value); + if (obj->compute_value != NULL) { + Py_DECREF(obj->compute_value); + } + obj->compute_value = compute_value; +} + +#endif diff --git a/mypyc/lib-rt/pythoncapi_compat.h b/mypyc/lib-rt/pythoncapi_compat.h index f22e92f7358f8..1b59f93de7ece 100644 --- a/mypyc/lib-rt/pythoncapi_compat.h +++ b/mypyc/lib-rt/pythoncapi_compat.h @@ -19,34 +19,25 @@ extern "C" { #endif #include -#include "frameobject.h" // PyFrameObject, PyFrame_GetBack() - -// Compatibility with Visual Studio 2013 and older which don't support -// the inline keyword in C (only in C++): use __inline instead. -#if (defined(_MSC_VER) && _MSC_VER < 1900 \ - && !defined(__cplusplus) && !defined(inline)) -# define PYCAPI_COMPAT_STATIC_INLINE(TYPE) static __inline TYPE -#else -# define PYCAPI_COMPAT_STATIC_INLINE(TYPE) static inline TYPE +// Python 3.11.0b4 added PyFrame_Back() to Python.h +#if PY_VERSION_HEX < 0x030b00B4 && !defined(PYPY_VERSION) +# include "frameobject.h" // PyFrameObject, PyFrame_GetBack() #endif -// C++ compatibility: _Py_CAST() and _Py_NULL #ifndef _Py_CAST -# ifdef __cplusplus -# define _Py_CAST(type, expr) \ - const_cast(reinterpret_cast(expr)) -# else -# define _Py_CAST(type, expr) ((type)(expr)) -# endif +# define _Py_CAST(type, expr) ((type)(expr)) #endif -#ifndef _Py_NULL -# ifdef __cplusplus -# define _Py_NULL nullptr -# else -# define _Py_NULL NULL -# endif + +// Static inline functions should use _Py_NULL rather than using directly NULL +// to prevent C++ compiler warnings. On C23 and newer and on C++11 and newer, +// _Py_NULL is defined as nullptr. +#if (defined (__STDC_VERSION__) && __STDC_VERSION__ > 201710L) \ + || (defined(__cplusplus) && __cplusplus >= 201103) +# define _Py_NULL nullptr +#else +# define _Py_NULL NULL #endif // Cast argument to PyObject* type. 
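
The ``typealiasobject`` struct poking above exists to preserve CPython's lazy evaluation of ``type`` statement values in compiled code: the right-hand side is installed as a compute function and only evaluated when ``__value__`` is first accessed. A short Python 3.12 illustration of the behaviour being matched (not part of the patch):

.. code-block:: python

    # The value expression of a "type" statement is evaluated lazily, so a
    # forward reference like this is fine.
    type Alias = Later          # no NameError here

    class Later: ...

    print(Alias.__value__)      # the right-hand side is evaluated only now
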
@@ -57,8 +48,7 @@ extern "C" { // bpo-42262 added Py_NewRef() to Python 3.10.0a3 #if PY_VERSION_HEX < 0x030A00A3 && !defined(Py_NewRef) -PYCAPI_COMPAT_STATIC_INLINE(PyObject*) -_Py_NewRef(PyObject *obj) +static inline PyObject* _Py_NewRef(PyObject *obj) { Py_INCREF(obj); return obj; @@ -69,8 +59,7 @@ _Py_NewRef(PyObject *obj) // bpo-42262 added Py_XNewRef() to Python 3.10.0a3 #if PY_VERSION_HEX < 0x030A00A3 && !defined(Py_XNewRef) -PYCAPI_COMPAT_STATIC_INLINE(PyObject*) -_Py_XNewRef(PyObject *obj) +static inline PyObject* _Py_XNewRef(PyObject *obj) { Py_XINCREF(obj); return obj; @@ -81,8 +70,7 @@ _Py_XNewRef(PyObject *obj) // bpo-39573 added Py_SET_REFCNT() to Python 3.9.0a4 #if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_REFCNT) -PYCAPI_COMPAT_STATIC_INLINE(void) -_Py_SET_REFCNT(PyObject *ob, Py_ssize_t refcnt) +static inline void _Py_SET_REFCNT(PyObject *ob, Py_ssize_t refcnt) { ob->ob_refcnt = refcnt; } @@ -93,18 +81,20 @@ _Py_SET_REFCNT(PyObject *ob, Py_ssize_t refcnt) // Py_SETREF() and Py_XSETREF() were added to Python 3.5.2. // It is excluded from the limited C API. #if (PY_VERSION_HEX < 0x03050200 && !defined(Py_SETREF)) && !defined(Py_LIMITED_API) -#define Py_SETREF(op, op2) \ - do { \ - PyObject *_py_tmp = _PyObject_CAST(op); \ - (op) = (op2); \ - Py_DECREF(_py_tmp); \ +#define Py_SETREF(dst, src) \ + do { \ + PyObject **_tmp_dst_ptr = _Py_CAST(PyObject**, &(dst)); \ + PyObject *_tmp_dst = (*_tmp_dst_ptr); \ + *_tmp_dst_ptr = _PyObject_CAST(src); \ + Py_DECREF(_tmp_dst); \ } while (0) -#define Py_XSETREF(op, op2) \ - do { \ - PyObject *_py_tmp = _PyObject_CAST(op); \ - (op) = (op2); \ - Py_XDECREF(_py_tmp); \ +#define Py_XSETREF(dst, src) \ + do { \ + PyObject **_tmp_dst_ptr = _Py_CAST(PyObject**, &(dst)); \ + PyObject *_tmp_dst = (*_tmp_dst_ptr); \ + *_tmp_dst_ptr = _PyObject_CAST(src); \ + Py_XDECREF(_tmp_dst); \ } while (0) #endif @@ -117,18 +107,17 @@ _Py_SET_REFCNT(PyObject *ob, Py_ssize_t refcnt) #if PY_VERSION_HEX < 0x030A00B1 && !defined(Py_IsNone) # define Py_IsNone(x) Py_Is(x, Py_None) #endif -#if PY_VERSION_HEX < 0x030A00B1 && !defined(Py_IsTrue) +#if (PY_VERSION_HEX < 0x030A00B1 || defined(PYPY_VERSION)) && !defined(Py_IsTrue) # define Py_IsTrue(x) Py_Is(x, Py_True) #endif -#if PY_VERSION_HEX < 0x030A00B1 && !defined(Py_IsFalse) +#if (PY_VERSION_HEX < 0x030A00B1 || defined(PYPY_VERSION)) && !defined(Py_IsFalse) # define Py_IsFalse(x) Py_Is(x, Py_False) #endif // bpo-39573 added Py_SET_TYPE() to Python 3.9.0a4 #if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_TYPE) -PYCAPI_COMPAT_STATIC_INLINE(void) -_Py_SET_TYPE(PyObject *ob, PyTypeObject *type) +static inline void _Py_SET_TYPE(PyObject *ob, PyTypeObject *type) { ob->ob_type = type; } @@ -138,8 +127,7 @@ _Py_SET_TYPE(PyObject *ob, PyTypeObject *type) // bpo-39573 added Py_SET_SIZE() to Python 3.9.0a4 #if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_SIZE) -PYCAPI_COMPAT_STATIC_INLINE(void) -_Py_SET_SIZE(PyVarObject *ob, Py_ssize_t size) +static inline void _Py_SET_SIZE(PyVarObject *ob, Py_ssize_t size) { ob->ob_size = size; } @@ -148,9 +136,8 @@ _Py_SET_SIZE(PyVarObject *ob, Py_ssize_t size) // bpo-40421 added PyFrame_GetCode() to Python 3.9.0b1 -#if PY_VERSION_HEX < 0x030900B1 -PYCAPI_COMPAT_STATIC_INLINE(PyCodeObject*) -PyFrame_GetCode(PyFrameObject *frame) +#if PY_VERSION_HEX < 0x030900B1 || defined(PYPY_VERSION) +static inline PyCodeObject* PyFrame_GetCode(PyFrameObject *frame) { assert(frame != _Py_NULL); assert(frame->f_code != _Py_NULL); @@ -158,8 +145,7 @@ PyFrame_GetCode(PyFrameObject *frame) } #endif 
-PYCAPI_COMPAT_STATIC_INLINE(PyCodeObject*) -_PyFrame_GetCodeBorrow(PyFrameObject *frame) +static inline PyCodeObject* _PyFrame_GetCodeBorrow(PyFrameObject *frame) { PyCodeObject *code = PyFrame_GetCode(frame); Py_DECREF(code); @@ -169,8 +155,7 @@ _PyFrame_GetCodeBorrow(PyFrameObject *frame) // bpo-40421 added PyFrame_GetBack() to Python 3.9.0b1 #if PY_VERSION_HEX < 0x030900B1 && !defined(PYPY_VERSION) -PYCAPI_COMPAT_STATIC_INLINE(PyFrameObject*) -PyFrame_GetBack(PyFrameObject *frame) +static inline PyFrameObject* PyFrame_GetBack(PyFrameObject *frame) { assert(frame != _Py_NULL); return _Py_CAST(PyFrameObject*, Py_XNewRef(frame->f_back)); @@ -178,8 +163,7 @@ PyFrame_GetBack(PyFrameObject *frame) #endif #if !defined(PYPY_VERSION) -PYCAPI_COMPAT_STATIC_INLINE(PyFrameObject*) -_PyFrame_GetBackBorrow(PyFrameObject *frame) +static inline PyFrameObject* _PyFrame_GetBackBorrow(PyFrameObject *frame) { PyFrameObject *back = PyFrame_GetBack(frame); Py_XDECREF(back); @@ -190,8 +174,7 @@ _PyFrame_GetBackBorrow(PyFrameObject *frame) // bpo-40421 added PyFrame_GetLocals() to Python 3.11.0a7 #if PY_VERSION_HEX < 0x030B00A7 && !defined(PYPY_VERSION) -PYCAPI_COMPAT_STATIC_INLINE(PyObject*) -PyFrame_GetLocals(PyFrameObject *frame) +static inline PyObject* PyFrame_GetLocals(PyFrameObject *frame) { #if PY_VERSION_HEX >= 0x030400B1 if (PyFrame_FastToLocalsWithError(frame) < 0) { @@ -207,8 +190,7 @@ PyFrame_GetLocals(PyFrameObject *frame) // bpo-40421 added PyFrame_GetGlobals() to Python 3.11.0a7 #if PY_VERSION_HEX < 0x030B00A7 && !defined(PYPY_VERSION) -PYCAPI_COMPAT_STATIC_INLINE(PyObject*) -PyFrame_GetGlobals(PyFrameObject *frame) +static inline PyObject* PyFrame_GetGlobals(PyFrameObject *frame) { return Py_NewRef(frame->f_globals); } @@ -217,8 +199,7 @@ PyFrame_GetGlobals(PyFrameObject *frame) // bpo-40421 added PyFrame_GetBuiltins() to Python 3.11.0a7 #if PY_VERSION_HEX < 0x030B00A7 && !defined(PYPY_VERSION) -PYCAPI_COMPAT_STATIC_INLINE(PyObject*) -PyFrame_GetBuiltins(PyFrameObject *frame) +static inline PyObject* PyFrame_GetBuiltins(PyFrameObject *frame) { return Py_NewRef(frame->f_builtins); } @@ -227,8 +208,7 @@ PyFrame_GetBuiltins(PyFrameObject *frame) // bpo-40421 added PyFrame_GetLasti() to Python 3.11.0b1 #if PY_VERSION_HEX < 0x030B00B1 && !defined(PYPY_VERSION) -PYCAPI_COMPAT_STATIC_INLINE(int) -PyFrame_GetLasti(PyFrameObject *frame) +static inline int PyFrame_GetLasti(PyFrameObject *frame) { #if PY_VERSION_HEX >= 0x030A00A7 // bpo-27129: Since Python 3.10.0a7, f_lasti is an instruction offset, @@ -245,9 +225,63 @@ PyFrame_GetLasti(PyFrameObject *frame) #endif +// gh-91248 added PyFrame_GetVar() to Python 3.12.0a2 +#if PY_VERSION_HEX < 0x030C00A2 && !defined(PYPY_VERSION) +static inline PyObject* PyFrame_GetVar(PyFrameObject *frame, PyObject *name) +{ + PyObject *locals, *value; + + locals = PyFrame_GetLocals(frame); + if (locals == NULL) { + return NULL; + } +#if PY_VERSION_HEX >= 0x03000000 + value = PyDict_GetItemWithError(locals, name); +#else + value = _PyDict_GetItemWithError(locals, name); +#endif + Py_DECREF(locals); + + if (value == NULL) { + if (PyErr_Occurred()) { + return NULL; + } +#if PY_VERSION_HEX >= 0x03000000 + PyErr_Format(PyExc_NameError, "variable %R does not exist", name); +#else + PyErr_SetString(PyExc_NameError, "variable does not exist"); +#endif + return NULL; + } + return Py_NewRef(value); +} +#endif + + +// gh-91248 added PyFrame_GetVarString() to Python 3.12.0a2 +#if PY_VERSION_HEX < 0x030C00A2 && !defined(PYPY_VERSION) +static inline PyObject* 
+PyFrame_GetVarString(PyFrameObject *frame, const char *name) +{ + PyObject *name_obj, *value; +#if PY_VERSION_HEX >= 0x03000000 + name_obj = PyUnicode_FromString(name); +#else + name_obj = PyString_FromString(name); +#endif + if (name_obj == NULL) { + return NULL; + } + value = PyFrame_GetVar(frame, name_obj); + Py_DECREF(name_obj); + return value; +} +#endif + + // bpo-39947 added PyThreadState_GetInterpreter() to Python 3.9.0a5 -#if PY_VERSION_HEX < 0x030900A5 -PYCAPI_COMPAT_STATIC_INLINE(PyInterpreterState *) +#if PY_VERSION_HEX < 0x030900A5 || defined(PYPY_VERSION) +static inline PyInterpreterState * PyThreadState_GetInterpreter(PyThreadState *tstate) { assert(tstate != _Py_NULL); @@ -258,8 +292,7 @@ PyThreadState_GetInterpreter(PyThreadState *tstate) // bpo-40429 added PyThreadState_GetFrame() to Python 3.9.0b1 #if PY_VERSION_HEX < 0x030900B1 && !defined(PYPY_VERSION) -PYCAPI_COMPAT_STATIC_INLINE(PyFrameObject*) -PyThreadState_GetFrame(PyThreadState *tstate) +static inline PyFrameObject* PyThreadState_GetFrame(PyThreadState *tstate) { assert(tstate != _Py_NULL); return _Py_CAST(PyFrameObject *, Py_XNewRef(tstate->frame)); @@ -267,7 +300,7 @@ PyThreadState_GetFrame(PyThreadState *tstate) #endif #if !defined(PYPY_VERSION) -PYCAPI_COMPAT_STATIC_INLINE(PyFrameObject*) +static inline PyFrameObject* _PyThreadState_GetFrameBorrow(PyThreadState *tstate) { PyFrameObject *frame = PyThreadState_GetFrame(tstate); @@ -278,9 +311,8 @@ _PyThreadState_GetFrameBorrow(PyThreadState *tstate) // bpo-39947 added PyInterpreterState_Get() to Python 3.9.0a5 -#if PY_VERSION_HEX < 0x030900A5 -PYCAPI_COMPAT_STATIC_INLINE(PyInterpreterState*) -PyInterpreterState_Get(void) +#if PY_VERSION_HEX < 0x030900A5 || defined(PYPY_VERSION) +static inline PyInterpreterState* PyInterpreterState_Get(void) { PyThreadState *tstate; PyInterpreterState *interp; @@ -300,8 +332,7 @@ PyInterpreterState_Get(void) // bpo-39947 added PyInterpreterState_Get() to Python 3.9.0a6 #if 0x030700A1 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x030900A6 && !defined(PYPY_VERSION) -PYCAPI_COMPAT_STATIC_INLINE(uint64_t) -PyThreadState_GetID(PyThreadState *tstate) +static inline uint64_t PyThreadState_GetID(PyThreadState *tstate) { assert(tstate != _Py_NULL); return tstate->id; @@ -310,8 +341,7 @@ PyThreadState_GetID(PyThreadState *tstate) // bpo-43760 added PyThreadState_EnterTracing() to Python 3.11.0a2 #if PY_VERSION_HEX < 0x030B00A2 && !defined(PYPY_VERSION) -PYCAPI_COMPAT_STATIC_INLINE(void) -PyThreadState_EnterTracing(PyThreadState *tstate) +static inline void PyThreadState_EnterTracing(PyThreadState *tstate) { tstate->tracing++; #if PY_VERSION_HEX >= 0x030A00A1 @@ -324,8 +354,7 @@ PyThreadState_EnterTracing(PyThreadState *tstate) // bpo-43760 added PyThreadState_LeaveTracing() to Python 3.11.0a2 #if PY_VERSION_HEX < 0x030B00A2 && !defined(PYPY_VERSION) -PYCAPI_COMPAT_STATIC_INLINE(void) -PyThreadState_LeaveTracing(PyThreadState *tstate) +static inline void PyThreadState_LeaveTracing(PyThreadState *tstate) { int use_tracing = (tstate->c_tracefunc != _Py_NULL || tstate->c_profilefunc != _Py_NULL); @@ -340,9 +369,9 @@ PyThreadState_LeaveTracing(PyThreadState *tstate) // bpo-37194 added PyObject_CallNoArgs() to Python 3.9.0a1 -#if PY_VERSION_HEX < 0x030900A1 -PYCAPI_COMPAT_STATIC_INLINE(PyObject*) -PyObject_CallNoArgs(PyObject *func) +// PyObject_CallNoArgs() added to PyPy 3.9.16-v7.3.11 +#if !defined(PyObject_CallNoArgs) && PY_VERSION_HEX < 0x030900A1 +static inline PyObject* PyObject_CallNoArgs(PyObject *func) { return 
PyObject_CallFunctionObjArgs(func, NULL); } @@ -351,9 +380,9 @@ PyObject_CallNoArgs(PyObject *func) // bpo-39245 made PyObject_CallOneArg() public (previously called // _PyObject_CallOneArg) in Python 3.9.0a4 -#if PY_VERSION_HEX < 0x030900A4 -PYCAPI_COMPAT_STATIC_INLINE(PyObject*) -PyObject_CallOneArg(PyObject *func, PyObject *arg) +// PyObject_CallOneArg() added to PyPy 3.9.16-v7.3.11 +#if !defined(PyObject_CallOneArg) && PY_VERSION_HEX < 0x030900A4 +static inline PyObject* PyObject_CallOneArg(PyObject *func, PyObject *arg) { return PyObject_CallFunctionObjArgs(func, arg, NULL); } @@ -362,10 +391,19 @@ PyObject_CallOneArg(PyObject *func, PyObject *arg) // bpo-1635741 added PyModule_AddObjectRef() to Python 3.10.0a3 #if PY_VERSION_HEX < 0x030A00A3 -PYCAPI_COMPAT_STATIC_INLINE(int) +static inline int PyModule_AddObjectRef(PyObject *module, const char *name, PyObject *value) { int res; + + if (!value && !PyErr_Occurred()) { + // PyModule_AddObject() raises TypeError in this case + PyErr_SetString(PyExc_SystemError, + "PyModule_AddObjectRef() must be called " + "with an exception raised if value is NULL"); + return -1; + } + Py_XINCREF(value); res = PyModule_AddObject(module, name, value); if (res < 0) { @@ -378,8 +416,7 @@ PyModule_AddObjectRef(PyObject *module, const char *name, PyObject *value) // bpo-40024 added PyModule_AddType() to Python 3.9.0a5 #if PY_VERSION_HEX < 0x030900A5 -PYCAPI_COMPAT_STATIC_INLINE(int) -PyModule_AddType(PyObject *module, PyTypeObject *type) +static inline int PyModule_AddType(PyObject *module, PyTypeObject *type) { const char *name, *dot; @@ -403,8 +440,7 @@ PyModule_AddType(PyObject *module, PyTypeObject *type) // bpo-40241 added PyObject_GC_IsTracked() to Python 3.9.0a6. // bpo-4688 added _PyObject_GC_IS_TRACKED() to Python 2.7.0a2. #if PY_VERSION_HEX < 0x030900A6 && !defined(PYPY_VERSION) -PYCAPI_COMPAT_STATIC_INLINE(int) -PyObject_GC_IsTracked(PyObject* obj) +static inline int PyObject_GC_IsTracked(PyObject* obj) { return (PyObject_IS_GC(obj) && _PyObject_GC_IS_TRACKED(obj)); } @@ -413,8 +449,7 @@ PyObject_GC_IsTracked(PyObject* obj) // bpo-40241 added PyObject_GC_IsFinalized() to Python 3.9.0a6. // bpo-18112 added _PyGCHead_FINALIZED() to Python 3.4.0 final. #if PY_VERSION_HEX < 0x030900A6 && PY_VERSION_HEX >= 0x030400F0 && !defined(PYPY_VERSION) -PYCAPI_COMPAT_STATIC_INLINE(int) -PyObject_GC_IsFinalized(PyObject *obj) +static inline int PyObject_GC_IsFinalized(PyObject *obj) { PyGC_Head *gc = _Py_CAST(PyGC_Head*, obj) - 1; return (PyObject_IS_GC(obj) && _PyGCHead_FINALIZED(gc)); @@ -424,8 +459,7 @@ PyObject_GC_IsFinalized(PyObject *obj) // bpo-39573 added Py_IS_TYPE() to Python 3.9.0a4 #if PY_VERSION_HEX < 0x030900A4 && !defined(Py_IS_TYPE) -PYCAPI_COMPAT_STATIC_INLINE(int) -_Py_IS_TYPE(PyObject *ob, PyTypeObject *type) { +static inline int _Py_IS_TYPE(PyObject *ob, PyTypeObject *type) { return Py_TYPE(ob) == type; } #define Py_IS_TYPE(ob, type) _Py_IS_TYPE(_PyObject_CAST(ob), type) @@ -437,12 +471,10 @@ _Py_IS_TYPE(PyObject *ob, PyTypeObject *type) { // Python 3.11a2 moved _PyFloat_Pack2() and _PyFloat_Unpack2() to the internal // C API: Python 3.11a2-3.11a6 versions are not supported. 
#if 0x030600B1 <= PY_VERSION_HEX && PY_VERSION_HEX <= 0x030B00A1 && !defined(PYPY_VERSION) -PYCAPI_COMPAT_STATIC_INLINE(int) -PyFloat_Pack2(double x, char *p, int le) +static inline int PyFloat_Pack2(double x, char *p, int le) { return _PyFloat_Pack2(x, (unsigned char*)p, le); } -PYCAPI_COMPAT_STATIC_INLINE(double) -PyFloat_Unpack2(const char *p, int le) +static inline double PyFloat_Unpack2(const char *p, int le) { return _PyFloat_Unpack2((const unsigned char *)p, le); } #endif @@ -453,34 +485,54 @@ PyFloat_Unpack2(const char *p, int le) // and _PyFloat_Unpack8() to the internal C API: Python 3.11a2-3.11a6 versions // are not supported. #if PY_VERSION_HEX <= 0x030B00A1 && !defined(PYPY_VERSION) -PYCAPI_COMPAT_STATIC_INLINE(int) -PyFloat_Pack4(double x, char *p, int le) +static inline int PyFloat_Pack4(double x, char *p, int le) { return _PyFloat_Pack4(x, (unsigned char*)p, le); } -PYCAPI_COMPAT_STATIC_INLINE(int) -PyFloat_Pack8(double x, char *p, int le) +static inline int PyFloat_Pack8(double x, char *p, int le) { return _PyFloat_Pack8(x, (unsigned char*)p, le); } -PYCAPI_COMPAT_STATIC_INLINE(double) -PyFloat_Unpack4(const char *p, int le) +static inline double PyFloat_Unpack4(const char *p, int le) { return _PyFloat_Unpack4((const unsigned char *)p, le); } -PYCAPI_COMPAT_STATIC_INLINE(double) -PyFloat_Unpack8(const char *p, int le) +static inline double PyFloat_Unpack8(const char *p, int le) { return _PyFloat_Unpack8((const unsigned char *)p, le); } #endif // gh-92154 added PyCode_GetCode() to Python 3.11.0b1 #if PY_VERSION_HEX < 0x030B00B1 && !defined(PYPY_VERSION) -PYCAPI_COMPAT_STATIC_INLINE(PyObject*) -PyCode_GetCode(PyCodeObject *code) +static inline PyObject* PyCode_GetCode(PyCodeObject *code) { return Py_NewRef(code->co_code); } #endif +// gh-95008 added PyCode_GetVarnames() to Python 3.11.0rc1 +#if PY_VERSION_HEX < 0x030B00C1 && !defined(PYPY_VERSION) +static inline PyObject* PyCode_GetVarnames(PyCodeObject *code) +{ + return Py_NewRef(code->co_varnames); +} +#endif + +// gh-95008 added PyCode_GetFreevars() to Python 3.11.0rc1 +#if PY_VERSION_HEX < 0x030B00C1 && !defined(PYPY_VERSION) +static inline PyObject* PyCode_GetFreevars(PyCodeObject *code) +{ + return Py_NewRef(code->co_freevars); +} +#endif + +// gh-95008 added PyCode_GetCellvars() to Python 3.11.0rc1 +#if PY_VERSION_HEX < 0x030B00C1 && !defined(PYPY_VERSION) +static inline PyObject* PyCode_GetCellvars(PyCodeObject *code) +{ + return Py_NewRef(code->co_cellvars); +} +#endif + + // Py_UNUSED() was added to Python 3.4.0b2. 
#if PY_VERSION_HEX < 0x030400B2 && !defined(Py_UNUSED) # if defined(__GNUC__) || defined(__clang__) @@ -491,6 +543,817 @@ PyCode_GetCode(PyCodeObject *code) #endif +// gh-105922 added PyImport_AddModuleRef() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A0 +static inline PyObject* PyImport_AddModuleRef(const char *name) +{ + return Py_XNewRef(PyImport_AddModule(name)); +} +#endif + + +// gh-105927 added PyWeakref_GetRef() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D0000 +static inline int PyWeakref_GetRef(PyObject *ref, PyObject **pobj) +{ + PyObject *obj; + if (ref != NULL && !PyWeakref_Check(ref)) { + *pobj = NULL; + PyErr_SetString(PyExc_TypeError, "expected a weakref"); + return -1; + } + obj = PyWeakref_GetObject(ref); + if (obj == NULL) { + // SystemError if ref is NULL + *pobj = NULL; + return -1; + } + if (obj == Py_None) { + *pobj = NULL; + return 0; + } + *pobj = Py_NewRef(obj); + return (*pobj != NULL); +} +#endif + + +// bpo-36974 added PY_VECTORCALL_ARGUMENTS_OFFSET to Python 3.8b1 +#ifndef PY_VECTORCALL_ARGUMENTS_OFFSET +# define PY_VECTORCALL_ARGUMENTS_OFFSET (_Py_CAST(size_t, 1) << (8 * sizeof(size_t) - 1)) +#endif + +// bpo-36974 added PyVectorcall_NARGS() to Python 3.8b1 +#if PY_VERSION_HEX < 0x030800B1 +static inline Py_ssize_t PyVectorcall_NARGS(size_t n) +{ + return n & ~PY_VECTORCALL_ARGUMENTS_OFFSET; +} +#endif + + +// gh-105922 added PyObject_Vectorcall() to Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 +static inline PyObject* +PyObject_Vectorcall(PyObject *callable, PyObject *const *args, + size_t nargsf, PyObject *kwnames) +{ +#if PY_VERSION_HEX >= 0x030800B1 && !defined(PYPY_VERSION) + // bpo-36974 added _PyObject_Vectorcall() to Python 3.8.0b1 + return _PyObject_Vectorcall(callable, args, nargsf, kwnames); +#else + PyObject *posargs = NULL, *kwargs = NULL; + PyObject *res; + Py_ssize_t nposargs, nkwargs, i; + + if (nargsf != 0 && args == NULL) { + PyErr_BadInternalCall(); + goto error; + } + if (kwnames != NULL && !PyTuple_Check(kwnames)) { + PyErr_BadInternalCall(); + goto error; + } + + nposargs = (Py_ssize_t)PyVectorcall_NARGS(nargsf); + if (kwnames) { + nkwargs = PyTuple_GET_SIZE(kwnames); + } + else { + nkwargs = 0; + } + + posargs = PyTuple_New(nposargs); + if (posargs == NULL) { + goto error; + } + if (nposargs) { + for (i=0; i < nposargs; i++) { + PyTuple_SET_ITEM(posargs, i, Py_NewRef(*args)); + args++; + } + } + + if (nkwargs) { + kwargs = PyDict_New(); + if (kwargs == NULL) { + goto error; + } + + for (i = 0; i < nkwargs; i++) { + PyObject *key = PyTuple_GET_ITEM(kwnames, i); + PyObject *value = *args; + args++; + if (PyDict_SetItem(kwargs, key, value) < 0) { + goto error; + } + } + } + else { + kwargs = NULL; + } + + res = PyObject_Call(callable, posargs, kwargs); + Py_DECREF(posargs); + Py_XDECREF(kwargs); + return res; + +error: + Py_DECREF(posargs); + Py_XDECREF(kwargs); + return NULL; +#endif +} +#endif + + +// gh-106521 added PyObject_GetOptionalAttr() and +// PyObject_GetOptionalAttrString() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int +PyObject_GetOptionalAttr(PyObject *obj, PyObject *attr_name, PyObject **result) +{ + // bpo-32571 added _PyObject_LookupAttr() to Python 3.7.0b1 +#if PY_VERSION_HEX >= 0x030700B1 && !defined(PYPY_VERSION) + return _PyObject_LookupAttr(obj, attr_name, result); +#else + *result = PyObject_GetAttr(obj, attr_name); + if (*result != NULL) { + return 1; + } + if (!PyErr_Occurred()) { + return 0; + } + if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + return 0; + 
} + return -1; +#endif +} + +static inline int +PyObject_GetOptionalAttrString(PyObject *obj, const char *attr_name, PyObject **result) +{ + PyObject *name_obj; + int rc; +#if PY_VERSION_HEX >= 0x03000000 + name_obj = PyUnicode_FromString(attr_name); +#else + name_obj = PyString_FromString(attr_name); +#endif + if (name_obj == NULL) { + *result = NULL; + return -1; + } + rc = PyObject_GetOptionalAttr(obj, name_obj, result); + Py_DECREF(name_obj); + return rc; +} +#endif + + +// gh-106307 added PyObject_GetOptionalAttr() and +// PyMapping_GetOptionalItemString() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int +PyMapping_GetOptionalItem(PyObject *obj, PyObject *key, PyObject **result) +{ + *result = PyObject_GetItem(obj, key); + if (*result) { + return 1; + } + if (!PyErr_ExceptionMatches(PyExc_KeyError)) { + return -1; + } + PyErr_Clear(); + return 0; +} + +static inline int +PyMapping_GetOptionalItemString(PyObject *obj, const char *key, PyObject **result) +{ + PyObject *key_obj; + int rc; +#if PY_VERSION_HEX >= 0x03000000 + key_obj = PyUnicode_FromString(key); +#else + key_obj = PyString_FromString(key); +#endif + if (key_obj == NULL) { + *result = NULL; + return -1; + } + rc = PyMapping_GetOptionalItem(obj, key_obj, result); + Py_DECREF(key_obj); + return rc; +} +#endif + +// gh-108511 added PyMapping_HasKeyWithError() and +// PyMapping_HasKeyStringWithError() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int +PyMapping_HasKeyWithError(PyObject *obj, PyObject *key) +{ + PyObject *res; + int rc = PyMapping_GetOptionalItem(obj, key, &res); + Py_XDECREF(res); + return rc; +} + +static inline int +PyMapping_HasKeyStringWithError(PyObject *obj, const char *key) +{ + PyObject *res; + int rc = PyMapping_GetOptionalItemString(obj, key, &res); + Py_XDECREF(res); + return rc; +} +#endif + + +// gh-108511 added PyObject_HasAttrWithError() and +// PyObject_HasAttrStringWithError() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int +PyObject_HasAttrWithError(PyObject *obj, PyObject *attr) +{ + PyObject *res; + int rc = PyObject_GetOptionalAttr(obj, attr, &res); + Py_XDECREF(res); + return rc; +} + +static inline int +PyObject_HasAttrStringWithError(PyObject *obj, const char *attr) +{ + PyObject *res; + int rc = PyObject_GetOptionalAttrString(obj, attr, &res); + Py_XDECREF(res); + return rc; +} +#endif + + +// gh-106004 added PyDict_GetItemRef() and PyDict_GetItemStringRef() +// to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int +PyDict_GetItemRef(PyObject *mp, PyObject *key, PyObject **result) +{ +#if PY_VERSION_HEX >= 0x03000000 + PyObject *item = PyDict_GetItemWithError(mp, key); +#else + PyObject *item = _PyDict_GetItemWithError(mp, key); +#endif + if (item != NULL) { + *result = Py_NewRef(item); + return 1; // found + } + if (!PyErr_Occurred()) { + *result = NULL; + return 0; // not found + } + *result = NULL; + return -1; +} + +static inline int +PyDict_GetItemStringRef(PyObject *mp, const char *key, PyObject **result) +{ + int res; +#if PY_VERSION_HEX >= 0x03000000 + PyObject *key_obj = PyUnicode_FromString(key); +#else + PyObject *key_obj = PyString_FromString(key); +#endif + if (key_obj == NULL) { + *result = NULL; + return -1; + } + res = PyDict_GetItemRef(mp, key_obj, result); + Py_DECREF(key_obj); + return res; +} +#endif + + +// gh-106307 added PyModule_Add() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int +PyModule_Add(PyObject *mod, const char *name, PyObject *value) +{ + int res = 
PyModule_AddObjectRef(mod, name, value); + Py_XDECREF(value); + return res; +} +#endif + + +// gh-108014 added Py_IsFinalizing() to Python 3.13.0a1 +// bpo-1856 added _Py_Finalizing to Python 3.2.1b1. +// _Py_IsFinalizing() was added to PyPy 7.3.0. +#if (0x030201B1 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x030D00A1) \ + && (!defined(PYPY_VERSION_NUM) || PYPY_VERSION_NUM >= 0x7030000) +static inline int Py_IsFinalizing(void) +{ +#if PY_VERSION_HEX >= 0x030700A1 + // _Py_IsFinalizing() was added to Python 3.7.0a1. + return _Py_IsFinalizing(); +#else + return (_Py_Finalizing != NULL); +#endif +} +#endif + + +// gh-108323 added PyDict_ContainsString() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int PyDict_ContainsString(PyObject *op, const char *key) +{ + PyObject *key_obj = PyUnicode_FromString(key); + if (key_obj == NULL) { + return -1; + } + int res = PyDict_Contains(op, key_obj); + Py_DECREF(key_obj); + return res; +} +#endif + + +// gh-108445 added PyLong_AsInt() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int PyLong_AsInt(PyObject *obj) +{ +#ifdef PYPY_VERSION + long value = PyLong_AsLong(obj); + if (value == -1 && PyErr_Occurred()) { + return -1; + } + if (value < (long)INT_MIN || (long)INT_MAX < value) { + PyErr_SetString(PyExc_OverflowError, + "Python int too large to convert to C int"); + return -1; + } + return (int)value; +#else + return _PyLong_AsInt(obj); +#endif +} +#endif + + +// gh-107073 added PyObject_VisitManagedDict() to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int +PyObject_VisitManagedDict(PyObject *obj, visitproc visit, void *arg) +{ + PyObject **dict = _PyObject_GetDictPtr(obj); + if (*dict == NULL) { + return -1; + } + Py_VISIT(*dict); + return 0; +} + +static inline void +PyObject_ClearManagedDict(PyObject *obj) +{ + PyObject **dict = _PyObject_GetDictPtr(obj); + if (*dict == NULL) { + return; + } + Py_CLEAR(*dict); +} +#endif + +// gh-108867 added PyThreadState_GetUnchecked() to Python 3.13.0a1 +// Python 3.5.2 added _PyThreadState_UncheckedGet(). +#if PY_VERSION_HEX >= 0x03050200 && PY_VERSION_HEX < 0x030D00A1 +static inline PyThreadState* +PyThreadState_GetUnchecked(void) +{ + return _PyThreadState_UncheckedGet(); +} +#endif + +// gh-110289 added PyUnicode_EqualToUTF8() and PyUnicode_EqualToUTF8AndSize() +// to Python 3.13.0a1 +#if PY_VERSION_HEX < 0x030D00A1 +static inline int +PyUnicode_EqualToUTF8AndSize(PyObject *unicode, const char *str, Py_ssize_t str_len) +{ + Py_ssize_t len; + const void *utf8; + PyObject *exc_type, *exc_value, *exc_tb; + int res; + + // API cannot report errors so save/restore the exception + PyErr_Fetch(&exc_type, &exc_value, &exc_tb); + + // Python 3.3.0a1 added PyUnicode_AsUTF8AndSize() +#if PY_VERSION_HEX >= 0x030300A1 + if (PyUnicode_IS_ASCII(unicode)) { + utf8 = PyUnicode_DATA(unicode); + len = PyUnicode_GET_LENGTH(unicode); + } + else { + utf8 = PyUnicode_AsUTF8AndSize(unicode, &len); + if (utf8 == NULL) { + // Memory allocation failure. The API cannot report error, + // so ignore the exception and return 0. + res = 0; + goto done; + } + } + + if (len != str_len) { + res = 0; + goto done; + } + res = (memcmp(utf8, str, (size_t)len) == 0); +#else + PyObject *bytes = PyUnicode_AsUTF8String(unicode); + if (bytes == NULL) { + // Memory allocation failure. The API cannot report error, + // so ignore the exception and return 0. 
+ res = 0; + goto done; + } + +#if PY_VERSION_HEX >= 0x03000000 + len = PyBytes_GET_SIZE(bytes); + utf8 = PyBytes_AS_STRING(bytes); +#else + len = PyString_GET_SIZE(bytes); + utf8 = PyString_AS_STRING(bytes); +#endif + if (len != str_len) { + Py_DECREF(bytes); + res = 0; + goto done; + } + + res = (memcmp(utf8, str, (size_t)len) == 0); + Py_DECREF(bytes); +#endif + +done: + PyErr_Restore(exc_type, exc_value, exc_tb); + return res; +} + +static inline int +PyUnicode_EqualToUTF8(PyObject *unicode, const char *str) +{ + return PyUnicode_EqualToUTF8AndSize(unicode, str, (Py_ssize_t)strlen(str)); +} +#endif + + +// gh-111138 added PyList_Extend() and PyList_Clear() to Python 3.13.0a2 +#if PY_VERSION_HEX < 0x030D00A2 +static inline int +PyList_Extend(PyObject *list, PyObject *iterable) +{ + return PyList_SetSlice(list, PY_SSIZE_T_MAX, PY_SSIZE_T_MAX, iterable); +} + +static inline int +PyList_Clear(PyObject *list) +{ + return PyList_SetSlice(list, 0, PY_SSIZE_T_MAX, NULL); +} +#endif + +// gh-111262 added PyDict_Pop() and PyDict_PopString() to Python 3.13.0a2 +#if PY_VERSION_HEX < 0x030D00A2 +static inline int +PyDict_Pop(PyObject *dict, PyObject *key, PyObject **result) +{ + PyObject *value; + + if (!PyDict_Check(dict)) { + PyErr_BadInternalCall(); + if (result) { + *result = NULL; + } + return -1; + } + + // bpo-16991 added _PyDict_Pop() to Python 3.5.0b2. + // Python 3.6.0b3 changed _PyDict_Pop() first argument type to PyObject*. + // Python 3.13.0a1 removed _PyDict_Pop(). +#if defined(PYPY_VERSION) || PY_VERSION_HEX < 0x030500b2 || PY_VERSION_HEX >= 0x030D0000 + value = PyObject_CallMethod(dict, "pop", "O", key); +#elif PY_VERSION_HEX < 0x030600b3 + value = _PyDict_Pop(_Py_CAST(PyDictObject*, dict), key, NULL); +#else + value = _PyDict_Pop(dict, key, NULL); +#endif + if (value == NULL) { + if (result) { + *result = NULL; + } + if (PyErr_Occurred() && !PyErr_ExceptionMatches(PyExc_KeyError)) { + return -1; + } + PyErr_Clear(); + return 0; + } + if (result) { + *result = value; + } + else { + Py_DECREF(value); + } + return 1; +} + +static inline int +PyDict_PopString(PyObject *dict, const char *key, PyObject **result) +{ + PyObject *key_obj = PyUnicode_FromString(key); + if (key_obj == NULL) { + if (result != NULL) { + *result = NULL; + } + return -1; + } + + int res = PyDict_Pop(dict, key_obj, result); + Py_DECREF(key_obj); + return res; +} +#endif + + +#if PY_VERSION_HEX < 0x030200A4 +// Python 3.2.0a4 added Py_hash_t type +typedef Py_ssize_t Py_hash_t; +#endif + + +// gh-111545 added Py_HashPointer() to Python 3.13.0a3 +#if PY_VERSION_HEX < 0x030D00A3 +static inline Py_hash_t Py_HashPointer(const void *ptr) +{ +#if PY_VERSION_HEX >= 0x030900A4 && !defined(PYPY_VERSION) + return _Py_HashPointer(ptr); +#else + return _Py_HashPointer(_Py_CAST(void*, ptr)); +#endif +} +#endif + + +// Python 3.13a4 added a PyTime API. +// Use the private API added to Python 3.5. 
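The PyTime shims below fall back to the ``time`` module whenever the private clock API is unavailable (the PyPy and pre-3.7 branches of ``PyTime_PerfCounter()``). A rough Python sketch of the conversion that fallback performs — illustrative only, not code from this patch:

.. code-block:: python

    import time

    def perf_counter_ns_fallback() -> int:
        # Python 3.7+ exposes a nanosecond counter directly.
        if hasattr(time, "perf_counter_ns"):
            return time.perf_counter_ns()
        # Older interpreters only have the float-returning clock; scale to
        # nanoseconds, mirroring the shim's "(PyTime_t)(d * 1e9)" conversion.
        return int(time.perf_counter() * 1e9)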
+#if PY_VERSION_HEX < 0x030D00A4 && PY_VERSION_HEX >= 0x03050000 +typedef _PyTime_t PyTime_t; +#define PyTime_MIN _PyTime_MIN +#define PyTime_MAX _PyTime_MAX + +static inline double PyTime_AsSecondsDouble(PyTime_t t) +{ return _PyTime_AsSecondsDouble(t); } + +static inline int PyTime_Monotonic(PyTime_t *result) +{ return _PyTime_GetMonotonicClockWithInfo(result, NULL); } + +static inline int PyTime_Time(PyTime_t *result) +{ return _PyTime_GetSystemClockWithInfo(result, NULL); } + +static inline int PyTime_PerfCounter(PyTime_t *result) +{ +#if PY_VERSION_HEX >= 0x03070000 && !defined(PYPY_VERSION) + return _PyTime_GetPerfCounterWithInfo(result, NULL); +#elif PY_VERSION_HEX >= 0x03070000 + // Call time.perf_counter_ns() and convert Python int object to PyTime_t. + // Cache time.perf_counter_ns() function for best performance. + static PyObject *func = NULL; + if (func == NULL) { + PyObject *mod = PyImport_ImportModule("time"); + if (mod == NULL) { + return -1; + } + + func = PyObject_GetAttrString(mod, "perf_counter_ns"); + Py_DECREF(mod); + if (func == NULL) { + return -1; + } + } + + PyObject *res = PyObject_CallNoArgs(func); + if (res == NULL) { + return -1; + } + long long value = PyLong_AsLongLong(res); + Py_DECREF(res); + + if (value == -1 && PyErr_Occurred()) { + return -1; + } + + Py_BUILD_ASSERT(sizeof(value) >= sizeof(PyTime_t)); + *result = (PyTime_t)value; + return 0; +#else + // Call time.perf_counter() and convert C double to PyTime_t. + // Cache time.perf_counter() function for best performance. + static PyObject *func = NULL; + if (func == NULL) { + PyObject *mod = PyImport_ImportModule("time"); + if (mod == NULL) { + return -1; + } + + func = PyObject_GetAttrString(mod, "perf_counter"); + Py_DECREF(mod); + if (func == NULL) { + return -1; + } + } + + PyObject *res = PyObject_CallNoArgs(func); + if (res == NULL) { + return -1; + } + double d = PyFloat_AsDouble(res); + Py_DECREF(res); + + if (d == -1.0 && PyErr_Occurred()) { + return -1; + } + + // Avoid floor() to avoid having to link to libm + *result = (PyTime_t)(d * 1e9); + return 0; +#endif +} + +#endif + +// gh-111389 added hash constants to Python 3.13.0a5. These constants were +// added first as private macros to Python 3.4.0b1 and PyPy 7.3.9. 
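The hash constants aliased below are the same values CPython has long reported through ``sys.hash_info``; a quick way to inspect them from Python (illustrative sketch, not part of the patch):

.. code-block:: python

    import sys

    # PyHASH_MODULUS is the Mersenne prime 2**PyHASH_BITS - 1 used for
    # numeric hashing; PyHASH_INF is the hash of positive infinity and
    # PyHASH_IMAG the multiplier for the imaginary part of a complex.
    print(sys.hash_info.modulus)  # PyHASH_MODULUS
    print(sys.hash_info.inf)      # PyHASH_INF
    print(sys.hash_info.imag)     # PyHASH_IMAG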
+#if (!defined(PyHASH_BITS) \ + && ((!defined(PYPY_VERSION) && PY_VERSION_HEX >= 0x030400B1) \ + || (defined(PYPY_VERSION) && PY_VERSION_HEX >= 0x03070000 \ + && PYPY_VERSION_NUM >= 0x07090000))) +# define PyHASH_BITS _PyHASH_BITS +# define PyHASH_MODULUS _PyHASH_MODULUS +# define PyHASH_INF _PyHASH_INF +# define PyHASH_IMAG _PyHASH_IMAG +#endif + + +// gh-111545 added Py_GetConstant() and Py_GetConstantBorrowed() +// to Python 3.13.0a6 +#if PY_VERSION_HEX < 0x030D00A6 && !defined(Py_CONSTANT_NONE) + +#define Py_CONSTANT_NONE 0 +#define Py_CONSTANT_FALSE 1 +#define Py_CONSTANT_TRUE 2 +#define Py_CONSTANT_ELLIPSIS 3 +#define Py_CONSTANT_NOT_IMPLEMENTED 4 +#define Py_CONSTANT_ZERO 5 +#define Py_CONSTANT_ONE 6 +#define Py_CONSTANT_EMPTY_STR 7 +#define Py_CONSTANT_EMPTY_BYTES 8 +#define Py_CONSTANT_EMPTY_TUPLE 9 + +static inline PyObject* Py_GetConstant(unsigned int constant_id) +{ + static PyObject* constants[Py_CONSTANT_EMPTY_TUPLE + 1] = {NULL}; + + if (constants[Py_CONSTANT_NONE] == NULL) { + constants[Py_CONSTANT_NONE] = Py_None; + constants[Py_CONSTANT_FALSE] = Py_False; + constants[Py_CONSTANT_TRUE] = Py_True; + constants[Py_CONSTANT_ELLIPSIS] = Py_Ellipsis; + constants[Py_CONSTANT_NOT_IMPLEMENTED] = Py_NotImplemented; + + constants[Py_CONSTANT_ZERO] = PyLong_FromLong(0); + if (constants[Py_CONSTANT_ZERO] == NULL) { + goto fatal_error; + } + + constants[Py_CONSTANT_ONE] = PyLong_FromLong(1); + if (constants[Py_CONSTANT_ONE] == NULL) { + goto fatal_error; + } + + constants[Py_CONSTANT_EMPTY_STR] = PyUnicode_FromStringAndSize("", 0); + if (constants[Py_CONSTANT_EMPTY_STR] == NULL) { + goto fatal_error; + } + + constants[Py_CONSTANT_EMPTY_BYTES] = PyBytes_FromStringAndSize("", 0); + if (constants[Py_CONSTANT_EMPTY_BYTES] == NULL) { + goto fatal_error; + } + + constants[Py_CONSTANT_EMPTY_TUPLE] = PyTuple_New(0); + if (constants[Py_CONSTANT_EMPTY_TUPLE] == NULL) { + goto fatal_error; + } + // goto dance to avoid compiler warnings about Py_FatalError() + goto init_done; + +fatal_error: + // This case should never happen + Py_FatalError("Py_GetConstant() failed to get constants"); + } + +init_done: + if (constant_id <= Py_CONSTANT_EMPTY_TUPLE) { + return Py_NewRef(constants[constant_id]); + } + else { + PyErr_BadInternalCall(); + return NULL; + } +} + +static inline PyObject* Py_GetConstantBorrowed(unsigned int constant_id) +{ + PyObject *obj = Py_GetConstant(constant_id); + Py_XDECREF(obj); + return obj; +} +#endif + + +// gh-114329 added PyList_GetItemRef() to Python 3.13.0a4 +#if PY_VERSION_HEX < 0x030D00A4 +static inline PyObject * +PyList_GetItemRef(PyObject *op, Py_ssize_t index) +{ + PyObject *item = PyList_GetItem(op, index); + Py_XINCREF(item); + return item; +} +#endif + + +// gh-114329 added PyList_GetItemRef() to Python 3.13.0a4 +#if PY_VERSION_HEX < 0x030D00A4 +static inline int +PyDict_SetDefaultRef(PyObject *d, PyObject *key, PyObject *default_value, + PyObject **result) +{ + PyObject *value; + if (PyDict_GetItemRef(d, key, &value) < 0) { + // get error + if (result) { + *result = NULL; + } + return -1; + } + if (value != NULL) { + // present + if (result) { + *result = value; + } + else { + Py_DECREF(value); + } + return 1; + } + + // missing: set the item + if (PyDict_SetItem(d, key, default_value) < 0) { + // set error + if (result) { + *result = NULL; + } + return -1; + } + if (result) { + *result = Py_NewRef(default_value); + } + return 0; +} +#endif + + +// gh-116560 added PyLong_GetSign() to Python 3.14a4 +#if PY_VERSION_HEX < 0x030E00A1 +static inline int 
PyLong_GetSign(PyObject *obj, int *sign) +{ + if (!PyLong_Check(obj)) { + PyErr_Format(PyExc_TypeError, "expect int, got %s", Py_TYPE(obj)->tp_name); + return -1; + } + + *sign = _PyLong_Sign(obj); + return 0; +} +#endif + + #ifdef __cplusplus } #endif diff --git a/mypyc/lib-rt/pythonsupport.c b/mypyc/lib-rt/pythonsupport.c new file mode 100644 index 0000000000000..90fb69705a00a --- /dev/null +++ b/mypyc/lib-rt/pythonsupport.c @@ -0,0 +1,106 @@ +// Collects code that was copied in from cpython, for a couple of different reasons: +// * We wanted to modify it to produce a more efficient version for our uses +// * We needed to call it and it was static :( +// * We wanted to call it and needed to backport it + +#include "pythonsupport.h" + +#if CPY_3_12_FEATURES + +// Slow path of CPyLong_AsSsize_tAndOverflow (non-inlined) +Py_ssize_t +CPyLong_AsSsize_tAndOverflow_(PyObject *vv, int *overflow) +{ + PyLongObject *v = (PyLongObject *)vv; + size_t x, prev; + Py_ssize_t res; + Py_ssize_t i; + int sign; + + *overflow = 0; + + res = -1; + i = CPY_LONG_TAG(v); + + sign = 1; + x = 0; + if (i & CPY_SIGN_NEGATIVE) { + sign = -1; + } + i >>= CPY_NON_SIZE_BITS; + while (--i >= 0) { + prev = x; + x = (x << PyLong_SHIFT) + CPY_LONG_DIGIT(v, i); + if ((x >> PyLong_SHIFT) != prev) { + *overflow = sign; + goto exit; + } + } + /* Haven't lost any bits, but casting to long requires extra + * care. + */ + if (x <= (size_t)CPY_TAGGED_MAX) { + res = (Py_ssize_t)x * sign; + } + else if (sign < 0 && x == CPY_TAGGED_ABS_MIN) { + res = CPY_TAGGED_MIN; + } + else { + *overflow = sign; + /* res is already set to -1 */ + } + exit: + return res; +} + +#else + +// Slow path of CPyLong_AsSsize_tAndOverflow (non-inlined, Python 3.11 and earlier) +Py_ssize_t +CPyLong_AsSsize_tAndOverflow_(PyObject *vv, int *overflow) +{ + /* This version by Tim Peters */ + PyLongObject *v = (PyLongObject *)vv; + size_t x, prev; + Py_ssize_t res; + Py_ssize_t i; + int sign; + + *overflow = 0; + + res = -1; + i = Py_SIZE(v); + + sign = 1; + x = 0; + if (i < 0) { + sign = -1; + i = -(i); + } + while (--i >= 0) { + prev = x; + x = (x << PyLong_SHIFT) + CPY_LONG_DIGIT(v, i); + if ((x >> PyLong_SHIFT) != prev) { + *overflow = sign; + goto exit; + } + } + /* Haven't lost any bits, but casting to long requires extra + * care. 
+ */ + if (x <= (size_t)CPY_TAGGED_MAX) { + res = (Py_ssize_t)x * sign; + } + else if (sign < 0 && x == CPY_TAGGED_ABS_MIN) { + res = CPY_TAGGED_MIN; + } + else { + *overflow = sign; + /* res is already set to -1 */ + } + exit: + return res; +} + + +#endif diff --git a/mypyc/lib-rt/pythonsupport.h b/mypyc/lib-rt/pythonsupport.h index f7d501f44a27b..85f9ec64ac908 100644 --- a/mypyc/lib-rt/pythonsupport.h +++ b/mypyc/lib-rt/pythonsupport.h @@ -129,6 +129,9 @@ init_subclass(PyTypeObject *type, PyObject *kwds) return 0; } +Py_ssize_t +CPyLong_AsSsize_tAndOverflow_(PyObject *vv, int *overflow); + #if CPY_3_12_FEATURES static inline Py_ssize_t @@ -136,10 +139,8 @@ CPyLong_AsSsize_tAndOverflow(PyObject *vv, int *overflow) { /* This version by Tim Peters */ PyLongObject *v = (PyLongObject *)vv; - size_t x, prev; Py_ssize_t res; Py_ssize_t i; - int sign; *overflow = 0; @@ -154,35 +155,12 @@ CPyLong_AsSsize_tAndOverflow(PyObject *vv, int *overflow) } else if (i == ((1 << CPY_NON_SIZE_BITS) | CPY_SIGN_NEGATIVE)) { res = -(sdigit)CPY_LONG_DIGIT(v, 0); } else { - sign = 1; - x = 0; - if (i & CPY_SIGN_NEGATIVE) { - sign = -1; - } - i >>= CPY_NON_SIZE_BITS; - while (--i >= 0) { - prev = x; - x = (x << PyLong_SHIFT) + CPY_LONG_DIGIT(v, i); - if ((x >> PyLong_SHIFT) != prev) { - *overflow = sign; - goto exit; - } - } - /* Haven't lost any bits, but casting to long requires extra - * care (see comment above). - */ - if (x <= (size_t)CPY_TAGGED_MAX) { - res = (Py_ssize_t)x * sign; - } - else if (sign < 0 && x == CPY_TAGGED_ABS_MIN) { - res = CPY_TAGGED_MIN; - } - else { - *overflow = sign; - /* res is already set to -1 */ - } + // Slow path is moved to a non-inline helper function to + // limit size of generated code + int overflow_local; + res = CPyLong_AsSsize_tAndOverflow_(vv, &overflow_local); + *overflow = overflow_local; } - exit: return res; } @@ -204,10 +182,8 @@ CPyLong_AsSsize_tAndOverflow(PyObject *vv, int *overflow) { /* This version by Tim Peters */ PyLongObject *v = (PyLongObject *)vv; - size_t x, prev; Py_ssize_t res; Py_ssize_t i; - int sign; *overflow = 0; @@ -221,35 +197,12 @@ CPyLong_AsSsize_tAndOverflow(PyObject *vv, int *overflow) } else if (i == -1) { res = -(sdigit)CPY_LONG_DIGIT(v, 0); } else { - sign = 1; - x = 0; - if (i < 0) { - sign = -1; - i = -(i); - } - while (--i >= 0) { - prev = x; - x = (x << PyLong_SHIFT) + CPY_LONG_DIGIT(v, i); - if ((x >> PyLong_SHIFT) != prev) { - *overflow = sign; - goto exit; - } - } - /* Haven't lost any bits, but casting to long requires extra - * care (see comment above). 
- */ - if (x <= (size_t)CPY_TAGGED_MAX) { - res = (Py_ssize_t)x * sign; - } - else if (sign < 0 && x == CPY_TAGGED_ABS_MIN) { - res = CPY_TAGGED_MIN; - } - else { - *overflow = sign; - /* res is already set to -1 */ - } + // Slow path is moved to a non-inline helper function to + // limit size of generated code + int overflow_local; + res = CPyLong_AsSsize_tAndOverflow_(vv, &overflow_local); + *overflow = overflow_local; } - exit: return res; } diff --git a/mypyc/lib-rt/setup.py b/mypyc/lib-rt/setup.py index ef81b794c9bdd..66b130581cb30 100644 --- a/mypyc/lib-rt/setup.py +++ b/mypyc/lib-rt/setup.py @@ -58,6 +58,7 @@ def run(self): "list_ops.c", "exc_ops.c", "generic_ops.c", + "pythonsupport.c", ], depends=["CPy.h", "mypyc_util.h", "pythonsupport.h"], extra_compile_args=["-Wno-unused-function", "-Wno-sign-compare"] + compile_args, diff --git a/mypyc/primitives/generic_ops.py b/mypyc/primitives/generic_ops.py index 3caec0a9875ee..fe42767db11e5 100644 --- a/mypyc/primitives/generic_ops.py +++ b/mypyc/primitives/generic_ops.py @@ -178,7 +178,7 @@ ) # obj1[obj2] -method_op( +py_get_item_op = method_op( name="__getitem__", arg_types=[object_rprimitive, object_rprimitive], return_type=object_rprimitive, diff --git a/mypyc/primitives/misc_ops.py b/mypyc/primitives/misc_ops.py index fea62bbb19c47..e9016e24c46d8 100644 --- a/mypyc/primitives/misc_ops.py +++ b/mypyc/primitives/misc_ops.py @@ -265,3 +265,15 @@ return_type=c_pyssize_t_rprimitive, error_kind=ERR_NEVER, ) + +# Set the lazy value compute function of an TypeAliasType instance (Python 3.12+). +# This must only be used as part of initializing the object. Any existing value +# will be cleared. +set_type_alias_compute_function_op = custom_primitive_op( + name="set_type_alias_compute_function", + c_function_name="CPy_SetTypeAliasTypeComputeFunction", + # (alias object, value compute function) + arg_types=[object_rprimitive, object_rprimitive], + return_type=void_rtype, + error_kind=ERR_NEVER, +) diff --git a/mypyc/test-data/fixtures/ir.py b/mypyc/test-data/fixtures/ir.py index bf06613ad2a89..ac95ffe2c047a 100644 --- a/mypyc/test-data/fixtures/ir.py +++ b/mypyc/test-data/fixtures/ir.py @@ -7,12 +7,12 @@ overload, Mapping, Union, Callable, Sequence, FrozenSet, Protocol ) -T = TypeVar('T') +_T = TypeVar('_T') T_co = TypeVar('T_co', covariant=True) T_contra = TypeVar('T_contra', contravariant=True) -S = TypeVar('S') -K = TypeVar('K') # for keys in mapping -V = TypeVar('V') # for values in mapping +_S = TypeVar('_S') +_K = TypeVar('_K') # for keys in mapping +_V = TypeVar('_V') # for values in mapping class __SupportsAbs(Protocol[T_co]): def __abs__(self) -> T_co: pass @@ -45,6 +45,7 @@ def __ne__(self, x: object) -> bool: pass class type: def __init__(self, o: object) -> None: ... + def __or__(self, o: object) -> Any: ... __name__ : str __annotations__: Dict[str, Any] @@ -199,76 +200,76 @@ def __contains__(self, item: object) -> int: ... class function: pass -class list(Generic[T], Sequence[T], Iterable[T]): - def __init__(self, i: Optional[Iterable[T]] = None) -> None: pass +class list(Generic[_T], Sequence[_T], Iterable[_T]): + def __init__(self, i: Optional[Iterable[_T]] = None) -> None: pass @overload - def __getitem__(self, i: int) -> T: ... + def __getitem__(self, i: int) -> _T: ... @overload - def __getitem__(self, s: slice) -> List[T]: ... - def __setitem__(self, i: int, o: T) -> None: pass + def __getitem__(self, s: slice) -> List[_T]: ... 
+ def __setitem__(self, i: int, o: _T) -> None: pass def __delitem__(self, i: int) -> None: pass - def __mul__(self, i: int) -> List[T]: pass - def __rmul__(self, i: int) -> List[T]: pass - def __iter__(self) -> Iterator[T]: pass + def __mul__(self, i: int) -> List[_T]: pass + def __rmul__(self, i: int) -> List[_T]: pass + def __iter__(self) -> Iterator[_T]: pass def __len__(self) -> int: pass def __contains__(self, item: object) -> int: ... - def __add__(self, x: List[T]) -> List[T]: ... - def append(self, x: T) -> None: pass - def pop(self, i: int = -1) -> T: pass - def count(self, T) -> int: pass - def extend(self, l: Iterable[T]) -> None: pass - def insert(self, i: int, x: T) -> None: pass + def __add__(self, x: List[_T]) -> List[_T]: ... + def append(self, x: _T) -> None: pass + def pop(self, i: int = -1) -> _T: pass + def count(self, _T) -> int: pass + def extend(self, l: Iterable[_T]) -> None: pass + def insert(self, i: int, x: _T) -> None: pass def sort(self) -> None: pass def reverse(self) -> None: pass - def remove(self, o: T) -> None: pass - def index(self, o: T) -> int: pass + def remove(self, o: _T) -> None: pass + def index(self, o: _T) -> int: pass -class dict(Mapping[K, V]): +class dict(Mapping[_K, _V]): @overload - def __init__(self, **kwargs: K) -> None: ... + def __init__(self, **kwargs: _K) -> None: ... @overload - def __init__(self, map: Mapping[K, V], **kwargs: V) -> None: ... + def __init__(self, map: Mapping[_K, _V], **kwargs: _V) -> None: ... @overload - def __init__(self, iterable: Iterable[Tuple[K, V]], **kwargs: V) -> None: ... - def __getitem__(self, key: K) -> V: pass - def __setitem__(self, k: K, v: V) -> None: pass - def __delitem__(self, k: K) -> None: pass + def __init__(self, iterable: Iterable[Tuple[_K, _V]], **kwargs: _V) -> None: ... + def __getitem__(self, key: _K) -> _V: pass + def __setitem__(self, k: _K, v: _V) -> None: pass + def __delitem__(self, k: _K) -> None: pass def __contains__(self, item: object) -> int: pass - def __iter__(self) -> Iterator[K]: pass + def __iter__(self) -> Iterator[_K]: pass def __len__(self) -> int: pass @overload - def update(self, __m: Mapping[K, V], **kwargs: V) -> None: pass + def update(self, __m: Mapping[_K, _V], **kwargs: _V) -> None: pass @overload - def update(self, __m: Iterable[Tuple[K, V]], **kwargs: V) -> None: ... + def update(self, __m: Iterable[Tuple[_K, _V]], **kwargs: _V) -> None: ... @overload - def update(self, **kwargs: V) -> None: ... - def pop(self, x: int) -> K: pass - def keys(self) -> Iterable[K]: pass - def values(self) -> Iterable[V]: pass - def items(self) -> Iterable[Tuple[K, V]]: pass + def update(self, **kwargs: _V) -> None: ... + def pop(self, x: int) -> _K: pass + def keys(self) -> Iterable[_K]: pass + def values(self) -> Iterable[_V]: pass + def items(self) -> Iterable[Tuple[_K, _V]]: pass def clear(self) -> None: pass - def copy(self) -> Dict[K, V]: pass - def setdefault(self, key: K, val: V = ...) -> V: pass + def copy(self) -> Dict[_K, _V]: pass + def setdefault(self, key: _K, val: _V = ...) 
-> _V: pass -class set(Generic[T]): - def __init__(self, i: Optional[Iterable[T]] = None) -> None: pass - def __iter__(self) -> Iterator[T]: pass +class set(Generic[_T]): + def __init__(self, i: Optional[Iterable[_T]] = None) -> None: pass + def __iter__(self) -> Iterator[_T]: pass def __len__(self) -> int: pass - def add(self, x: T) -> None: pass - def remove(self, x: T) -> None: pass - def discard(self, x: T) -> None: pass + def add(self, x: _T) -> None: pass + def remove(self, x: _T) -> None: pass + def discard(self, x: _T) -> None: pass def clear(self) -> None: pass - def pop(self) -> T: pass - def update(self, x: Iterable[S]) -> None: pass - def __or__(self, s: Union[Set[S], FrozenSet[S]]) -> Set[Union[T, S]]: ... - def __xor__(self, s: Union[Set[S], FrozenSet[S]]) -> Set[Union[T, S]]: ... - -class frozenset(Generic[T]): - def __init__(self, i: Optional[Iterable[T]] = None) -> None: pass - def __iter__(self) -> Iterator[T]: pass + def pop(self) -> _T: pass + def update(self, x: Iterable[_S]) -> None: pass + def __or__(self, s: Union[Set[_S], FrozenSet[_S]]) -> Set[Union[_T, _S]]: ... + def __xor__(self, s: Union[Set[_S], FrozenSet[_S]]) -> Set[Union[_T, _S]]: ... + +class frozenset(Generic[_T]): + def __init__(self, i: Optional[Iterable[_T]] = None) -> None: pass + def __iter__(self) -> Iterator[_T]: pass def __len__(self) -> int: pass - def __or__(self, s: Union[Set[S], FrozenSet[S]]) -> FrozenSet[Union[T, S]]: ... - def __xor__(self, s: Union[Set[S], FrozenSet[S]]) -> FrozenSet[Union[T, S]]: ... + def __or__(self, s: Union[Set[_S], FrozenSet[_S]]) -> FrozenSet[Union[_T, _S]]: ... + def __xor__(self, s: Union[Set[_S], FrozenSet[_S]]) -> FrozenSet[Union[_T, _S]]: ... class slice: pass @@ -323,31 +324,31 @@ class OverflowError(ArithmeticError): pass class GeneratorExit(BaseException): pass -def any(i: Iterable[T]) -> bool: pass -def all(i: Iterable[T]) -> bool: pass -def sum(i: Iterable[T]) -> int: pass -def reversed(object: Sequence[T]) -> Iterator[T]: ... +def any(i: Iterable[_T]) -> bool: pass +def all(i: Iterable[_T]) -> bool: pass +def sum(i: Iterable[_T]) -> int: pass +def reversed(object: Sequence[_T]) -> Iterator[_T]: ... def id(o: object) -> int: pass # This type is obviously wrong but the test stubs don't have Sized anymore def len(o: object) -> int: pass def print(*object) -> None: pass def isinstance(x: object, t: object) -> bool: pass -def iter(i: Iterable[T]) -> Iterator[T]: pass +def iter(i: Iterable[_T]) -> Iterator[_T]: pass @overload -def next(i: Iterator[T]) -> T: pass +def next(i: Iterator[_T]) -> _T: pass @overload -def next(i: Iterator[T], default: T) -> T: pass +def next(i: Iterator[_T], default: _T) -> _T: pass def hash(o: object) -> int: ... def globals() -> Dict[str, Any]: ... def getattr(obj: object, name: str, default: Any = None) -> Any: ... def setattr(obj: object, name: str, value: Any) -> None: ... -def enumerate(x: Iterable[T]) -> Iterator[Tuple[int, T]]: ... +def enumerate(x: Iterable[_T]) -> Iterator[Tuple[int, _T]]: ... @overload -def zip(x: Iterable[T], y: Iterable[S]) -> Iterator[Tuple[T, S]]: ... +def zip(x: Iterable[_T], y: Iterable[_S]) -> Iterator[Tuple[_T, _S]]: ... @overload -def zip(x: Iterable[T], y: Iterable[S], z: Iterable[V]) -> Iterator[Tuple[T, S, V]]: ... +def zip(x: Iterable[_T], y: Iterable[_S], z: Iterable[_V]) -> Iterator[Tuple[_T, _S, _V]]: ... def eval(e: str) -> Any: ... -def abs(x: __SupportsAbs[T]) -> T: ... +def abs(x: __SupportsAbs[_T]) -> _T: ... 
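With the fixture's type variables now spelled with leading underscores, test cases that previously relied on the fixture's public ``T`` have to declare their own, as the ``.test`` updates further down do. A minimal example of the pattern (hypothetical test module, mirroring the ``irbuild-set.test`` change):

.. code-block:: python

    from typing import List, Set, TypeVar

    T = TypeVar("T")  # declared locally instead of leaking in from the fixture

    def f(l: List[T]) -> Set[T]:
        return set(l)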
@overload def divmod(x: __SupportsDivMod[T_contra, T_co], y: T_contra) -> T_co: ... @overload @@ -359,8 +360,8 @@ def pow(base: __SupportsPow3NoneOnly[T_contra, T_co], exp: T_contra, mod: None = @overload def pow(base: __SupportsPow3[T_contra, _M, T_co], exp: T_contra, mod: _M) -> T_co: ... def exit() -> None: ... -def min(x: T, y: T) -> T: ... -def max(x: T, y: T) -> T: ... +def min(x: _T, y: _T) -> _T: ... +def max(x: _T, y: _T) -> _T: ... def repr(o: object) -> str: ... def ascii(o: object) -> str: ... def ord(o: object) -> int: ... diff --git a/mypyc/test-data/fixtures/testutil.py b/mypyc/test-data/fixtures/testutil.py index 7f00ee5aea001..f210faf711091 100644 --- a/mypyc/test-data/fixtures/testutil.py +++ b/mypyc/test-data/fixtures/testutil.py @@ -5,7 +5,7 @@ import math from typing import ( Any, Iterator, TypeVar, Generator, Optional, List, Tuple, Sequence, - Union, Callable, Awaitable, + Union, Callable, Awaitable, Generic ) from typing import Final @@ -86,7 +86,7 @@ def run_generator(gen: Generator[T, V, U], F = TypeVar('F', bound=Callable) -class async_val(Awaitable[V]): +class async_val(Awaitable[V], Generic[T, V]): def __init__(self, val: T) -> None: self.val = val diff --git a/mypyc/test-data/fixtures/typing-full.pyi b/mypyc/test-data/fixtures/typing-full.pyi index 52bca09a1decc..6b6aba6802b15 100644 --- a/mypyc/test-data/fixtures/typing-full.pyi +++ b/mypyc/test-data/fixtures/typing-full.pyi @@ -10,26 +10,29 @@ from abc import abstractmethod, ABCMeta class GenericMeta(type): pass +class _SpecialForm: + def __getitem__(self, index): ... + cast = 0 overload = 0 -Any = 0 -Union = 0 +Any = object() Optional = 0 TypeVar = 0 Generic = 0 Protocol = 0 Tuple = 0 -Callable = 0 _promote = 0 NamedTuple = 0 Type = 0 no_type_check = 0 ClassVar = 0 Final = 0 -Literal = 0 TypedDict = 0 NoReturn = 0 NewType = 0 +Callable: _SpecialForm +Union: _SpecialForm +Literal: _SpecialForm T = TypeVar('T') T_co = TypeVar('T_co', covariant=True) @@ -167,3 +170,6 @@ class _TypedDict(Mapping[str, object]): def pop(self, k: NoReturn, default: T = ...) -> object: ... def update(self: T, __m: T) -> None: ... def __delitem__(self, k: NoReturn) -> None: ... 
+ +class TypeAliasType: + pass diff --git a/mypyc/test-data/irbuild-set.test b/mypyc/test-data/irbuild-set.test index 1ac638754a8b7..110801b78a660 100644 --- a/mypyc/test-data/irbuild-set.test +++ b/mypyc/test-data/irbuild-set.test @@ -39,7 +39,10 @@ L0: return r0 [case testNewSetFromIterable] -from typing import Set, List +from typing import Set, List, TypeVar + +T = TypeVar("T") + def f(l: List[T]) -> Set[T]: return set(l) [out] diff --git a/mypyc/test-data/run-loops.test b/mypyc/test-data/run-loops.test index 994b30b423471..6f7d79059a6d5 100644 --- a/mypyc/test-data/run-loops.test +++ b/mypyc/test-data/run-loops.test @@ -276,7 +276,10 @@ for k in range(12): [out] [case testForIterable] -from typing import Iterable, Dict, Any, Tuple +from typing import Iterable, Dict, Any, Tuple, TypeVar + +T = TypeVar("T") + def iterate_over_any(a: Any) -> None: for element in a: print(element) @@ -350,13 +353,13 @@ iterate_over_tuple((1, 2, 3)) Traceback (most recent call last): File "driver.py", line 16, in iterate_over_any(5) - File "native.py", line 3, in iterate_over_any + File "native.py", line 6, in iterate_over_any for element in a: TypeError: 'int' object is not iterable Traceback (most recent call last): File "driver.py", line 20, in iterate_over_iterable(broken_generator(5)) - File "native.py", line 7, in iterate_over_iterable + File "native.py", line 10, in iterate_over_iterable for element in iterable: File "driver.py", line 8, in broken_generator raise Exception('Exception Manually Raised') @@ -364,7 +367,7 @@ Exception: Exception Manually Raised Traceback (most recent call last): File "driver.py", line 24, in iterate_and_delete(d) - File "native.py", line 11, in iterate_and_delete + File "native.py", line 14, in iterate_and_delete for key in d: RuntimeError: dictionary changed size during iteration 15 diff --git a/mypyc/test-data/run-python312.test b/mypyc/test-data/run-python312.test new file mode 100644 index 0000000000000..5e8a388fd8d37 --- /dev/null +++ b/mypyc/test-data/run-python312.test @@ -0,0 +1,225 @@ +[case testPEP695Basics] +# flags: --enable-incomplete-feature=NewGenericSyntax +from typing import Any, TypeAliasType, cast + +from testutil import assertRaises + +def id[T](x: T) -> T: + return x + +def test_call_generic_function() -> None: + assert id(2) == 2 + assert id('x') == 'x' + +class C[T]: + x: T + + def __init__(self, x: T) -> None: + self.x = x + +class D[T, S]: + x: T + y: S + + def __init__(self, x: T, y: S) -> None: + self.x = x + self.y = y + + def set(self, x: object, y: object) -> None: + self.x = cast(T, x) + self.y = cast(S, y) + +def test_generic_class() -> None: + c = C(5) + assert c.x == 5 + c2 = C[str]('x') + assert c2.x == 'x' + d = D[str, int]('a', 5) + assert d.x == 'a' + assert d.y == 5 + d.set('b', 6) + assert d.x == 'b' + assert d.y == 6 + +def test_generic_class_via_any() -> None: + c_any: Any = C + c = c_any(2) + assert c.x == 2 + c2 = c_any[str]('y') + assert c2.x == 'y' + assert str(c_any[str]) == 'native.C[str]' + + d_any: Any = D + d = d_any(1, 'x') + assert d.x == 1 + assert d.y == 'x' + d2 = d_any[int, str](2, 'y') + assert d2.x == 2 + assert d2.y == 'y' + + with assertRaises(TypeError): + c_any[int, str] + with assertRaises(TypeError): + d_any[int] + +class E[*Ts]: pass + +def test_type_var_tuple() -> None: + e: E[int, str] = E() + e_any: Any = E + assert isinstance(e_any(), E) + assert isinstance(e_any[int](), E) + assert isinstance(e_any[int, str](), E) + +class F[**P]: pass + +def test_param_spec() -> None: + f: F[[int, str]] = F() + 
f_any: Any = F + assert isinstance(f_any(), F) + assert isinstance(f_any[[int, str]](), F) + +class SubC[S](C[S]): + def __init__(self, x: S) -> None: + super().__init__(x) + +def test_generic_subclass() -> None: + s = SubC(1) + assert s.x == 1 + s2 = SubC[str]('y') + assert s2.x == 'y' + sub_any: Any = SubC + assert sub_any(1).x == 1 + assert sub_any[str]('x').x == 'x' + assert isinstance(s, SubC) + assert isinstance(s, C) + +class SubD[ + T, # Put everything on separate lines + S]( + D[T, + S]): pass + +def test_generic_subclass_two_params() -> None: + s = SubD(3, 'y') + assert s.x == 3 + assert s.y == 'y' + s2 = SubD[str, int]('z', 4) + assert s2.x == 'z' + assert s2.y == 4 + sub_any: Any = SubD + assert sub_any(3, 'y').y == 'y' + assert sub_any[int, str](3, 'y').y == 'y' + assert isinstance(s, SubD) + assert isinstance(s, D) + +class SubE[*Ts](E[*Ts]): pass + +def test_type_var_tuple_subclass() -> None: + sub_any: Any = SubE + assert isinstance(sub_any(), SubE) + assert isinstance(sub_any(), E) + assert isinstance(sub_any[int](), SubE) + assert isinstance(sub_any[int, str](), SubE) + + +class SubF[**P](F[P]): pass + +def test_param_spec_subclass() -> None: + sub_any: Any = SubF + assert isinstance(sub_any(), SubF) + assert isinstance(sub_any(), F) + assert isinstance(sub_any[[int]](), SubF) + assert isinstance(sub_any[[int, str]](), SubF) + +# We test that upper bounds and restricted values can be used, but not that +# they are introspectable + +def bound[T: C](x: T) -> T: + return x + +def test_function_with_upper_bound() -> None: + c = C(1) + assert bound(c) is c + +def restriction[T: (int, str)](x: T) -> T: + return x + +def test_function_with_value_restriction() -> None: + assert restriction(1) == 1 + assert restriction('x') == 'x' + +class Bound[T: C]: + def __init__(self, x: T) -> None: + self.x = x + +def test_class_with_upper_bound() -> None: + c = C(1) + b = Bound(c) + assert b.x is c + b2 = Bound[C](c) + assert b2.x is c + +class Restriction[T: (int, str)]: + def __init__(self, x: T) -> None: + self.x = x + +def test_class_with_value_restriction() -> None: + r = Restriction(1) + assert r.x == 1 + r2 = Restriction[str]('a') + assert r2.x == 'a' + +type A = int + +def test_simple_type_alias() -> None: + assert isinstance(A, TypeAliasType) + assert getattr(A, "__value__") is int + assert str(A) == "A" + +type B = Fwd[int] +Fwd = list + +def test_forward_reference_in_alias() -> None: + assert isinstance(B, TypeAliasType) + assert getattr(B, "__value__") == list[int] + +type R = int | list[R] + +def test_recursive_type_alias() -> None: + assert isinstance(R, TypeAliasType) + assert getattr(R, "__value__") == (int | list[R]) +[typing fixtures/typing-full.pyi] + +[case testPEP695GenericTypeAlias] +# flags: --enable-incomplete-feature=NewGenericSyntax +from typing import Callable +from types import GenericAlias + +from testutil import assertRaises + +type A[T] = list[T] + +def test_generic_alias() -> None: + assert type(A[str]) is GenericAlias + assert str(A[str]) == "A[str]" + assert str(getattr(A, "__value__")) == "list[T]" + +type B[T, S] = dict[S, T] + +def test_generic_alias_with_two_args() -> None: + assert str(B[str, int]) == "B[str, int]" + assert str(getattr(B, "__value__")) == "dict[S, T]" + +type C[*Ts] = tuple[*Ts] + +def test_type_var_tuple_type_alias() -> None: + assert str(C[int, str]) == "C[int, str]" + assert str(getattr(C, "__value__")) == "tuple[typing.Unpack[Ts]]" + +type D[**P] = Callable[P, int] + +def test_param_spec_type_alias() -> None: + assert str(D[[int, 
str]]) == "D[[int, str]]" + assert str(getattr(D, "__value__")) == "typing.Callable[P, int]" +[typing fixtures/typing-full.pyi] diff --git a/mypyc/test/test_run.py b/mypyc/test/test_run.py index 467ef8b87a928..37de192a92913 100644 --- a/mypyc/test/test_run.py +++ b/mypyc/test/test_run.py @@ -71,6 +71,8 @@ if sys.version_info >= (3, 10): files.append("run-match.test") +if sys.version_info >= (3, 12): + files.append("run-python312.test") setup_format = """\ from setuptools import setup @@ -194,6 +196,7 @@ def run_case_step(self, testcase: DataDrivenTestCase, incremental_step: int) -> options.preserve_asts = True options.allow_empty_bodies = True options.incremental = self.separate + options.enable_incomplete_feature.append("NewGenericSyntax") # Avoid checking modules/packages named 'unchecked', to provide a way # to test interacting with code we don't have types for. diff --git a/test-data/unit/check-async-await.test b/test-data/unit/check-async-await.test index 713c82c752ce6..876fe0c6be15a 100644 --- a/test-data/unit/check-async-await.test +++ b/test-data/unit/check-async-await.test @@ -765,7 +765,8 @@ class Task(Future[T]): @overload def wait(fs: Iterable[FT]) -> Future[Tuple[List[FT], List[FT]]]: ... \ - # E: Overloaded function signatures 1 and 2 overlap with incompatible return types + # E: Overloaded function signatures 1 and 2 overlap with incompatible return types \ + # N: Flipping the order of overloads will fix this error @overload def wait(fs: Iterable[Awaitable[T]]) -> Future[Tuple[List[Task[T]], List[Task[T]]]]: ... def wait(fs: Any) -> Any: @@ -789,6 +790,7 @@ async def precise2(futures: Iterable[Awaitable[int]]) -> None: done, pending = await wait(futures) reveal_type(done) # N: Revealed type is "builtins.list[__main__.Task[builtins.int]]" + [builtins fixtures/async_await.pyi] [typing fixtures/typing-async.pyi] diff --git a/test-data/unit/check-class-namedtuple.test b/test-data/unit/check-class-namedtuple.test index f334b90116456..fd564c7e96cb2 100644 --- a/test-data/unit/check-class-namedtuple.test +++ b/test-data/unit/check-class-namedtuple.test @@ -535,7 +535,7 @@ class Base(NamedTuple): self.x = 3 # E: Property "x" defined in "Base" is read-only self[1] # E: Tuple index out of range reveal_type(self[T]) # N: Revealed type is "builtins.int" \ - # E: No overload variant of "__getitem__" of "tuple" matches argument type "object" \ + # E: No overload variant of "__getitem__" of "tuple" matches argument type "TypeVar" \ # N: Possible overload variants: \ # N: def __getitem__(self, int, /) -> int \ # N: def __getitem__(self, slice, /) -> Tuple[int, ...] @@ -568,6 +568,7 @@ reveal_type(Base(1).bad_override()) # N: Revealed type is "builtins.int" reveal_type(takes_base(Base(1))) # N: Revealed type is "builtins.int" reveal_type(takes_base(Child(1))) # N: Revealed type is "builtins.int" [builtins fixtures/tuple.pyi] +[typing fixtures/typing-full.pyi] [case testNewNamedTupleIllegalNames] from typing import Callable, NamedTuple diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index 983cb8454a051..82208d27df41a 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -1950,6 +1950,41 @@ class B: def foo(x: Union[A, B]) -> None: reveal_type(x.attr) # N: Revealed type is "builtins.str" +[case testDescriptorGetUnionRestricted] +from typing import Any, Union + +class getter: + def __get__(self, instance: X1, owner: Any) -> str: ... 
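These new descriptor test cases exercise ``__get__`` lookups through unions and through ``Type[...]``; the distinction being modelled is whether access goes through the class (instance is ``None``) or through an instance. A rough runtime sketch of that behaviour, not taken from the test suite:

.. code-block:: python

    class getter:
        def __get__(self, instance, owner):
            # Class access returns the descriptor itself; instance access
            # returns the computed value.
            return self if instance is None else "value"

    class X:
        prop = getter()

    assert isinstance(X.prop, getter)   # access via the class
    assert X().prop == "value"          # access via an instance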
+ +class X1: + prop = getter() + +class X2: + prop: str + +def foo(x: Union[X1, X2]) -> None: + reveal_type(x.prop) # N: Revealed type is "builtins.str" + +[case testDescriptorGetUnionType] +from typing import Any, Union, Type, overload + +class getter: + @overload + def __get__(self, instance: None, owner: Any) -> getter: ... + @overload + def __get__(self, instance: object, owner: Any) -> str: ... + def __get__(self, instance, owner): + ... + +class X1: + prop = getter() +class X2: + prop = getter() + +def foo(x: Type[Union[X1, X2]]) -> None: + reveal_type(x.prop) # N: Revealed type is "__main__.getter" + + -- _promote decorators -- ------------------- @@ -2392,10 +2427,10 @@ class B: [builtins fixtures/tuple.pyi] [case testReverseOperatorTypeVar1] -from typing import TypeVar, Any +from typing import TypeVar T = TypeVar("T", bound='Real') class Real: - def __add__(self, other: Any) -> str: ... + def __add__(self, other: object) -> str: ... class Fraction(Real): def __radd__(self, other: T) -> T: ... # E: Signatures of "__radd__" of "Fraction" and "__add__" of "T" are unsafely overlapping @@ -2430,7 +2465,7 @@ reveal_type(Real() + Fraction()) # N: Revealed type is "__main__.Real" reveal_type(Fraction() + Fraction()) # N: Revealed type is "builtins.str" [case testReverseOperatorTypeVar3] -from typing import TypeVar, Any +from typing import TypeVar T = TypeVar("T", bound='Real') class Real: def __add__(self, other: FractionChild) -> str: ... @@ -2666,14 +2701,12 @@ class X: [out] tmp/foo.pyi:6: error: Signatures of "__radd__" of "B" and "__add__" of "X" are unsafely overlapping -[case testUnsafeOverlappingWithLineNo] +[case testUnsafeOverlappingNotWithAny] from typing import TypeVar class Real: def __add__(self, other) -> str: ... class Fraction(Real): def __radd__(self, other: Real) -> Real: ... 
-[out] -main:5: error: Signatures of "__radd__" of "Fraction" and "__add__" of "Real" are unsafely overlapping [case testOverlappingNormalAndInplaceOperatorMethod] import typing @@ -3431,7 +3464,7 @@ def foo(arg: Type[Any]): from typing import Type, Any def foo(arg: Type[Any]): reveal_type(arg.__str__) # N: Revealed type is "def () -> builtins.str" - reveal_type(arg.mro()) # N: Revealed type is "builtins.list[builtins.type[Any]]" + reveal_type(arg.mro()) # N: Revealed type is "builtins.list[builtins.type]" [builtins fixtures/type.pyi] [out] @@ -4007,10 +4040,16 @@ def f(a: Type[User]) -> int: pass # E: Overloaded function signatures 1 and 2 o @overload def f(a: object) -> str: pass +# Note: plain type is equivalent to Type[Any] so no error here @overload -def g(a: Type[User]) -> int: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def g(a: Type[User]) -> int: pass @overload def g(a: type) -> str: pass + +@overload +def h(a: Type[User]) -> int: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +@overload +def h(a: Type[object]) -> str: pass [builtins fixtures/classmethod.pyi] [out] @@ -4751,12 +4790,15 @@ def g(x: Type[S]) -> str: return reveal_type(x * 0) # N: Revealed type is "builtins.str" [case testMetaclassGetitem] +import types + class M(type): def __getitem__(self, key) -> int: return 1 class A(metaclass=M): pass reveal_type(A[M]) # N: Revealed type is "builtins.int" +[builtins fixtures/tuple.pyi] [case testMetaclassSelfType] from typing import TypeVar, Type diff --git a/test-data/unit/check-custom-plugin.test b/test-data/unit/check-custom-plugin.test index 63529cf165ce6..2b3b3f4a86958 100644 --- a/test-data/unit/check-custom-plugin.test +++ b/test-data/unit/check-custom-plugin.test @@ -1050,6 +1050,39 @@ reveal_type(my_class.stmethod) # N: Revealed type is "Overload(def (arg: builti \[mypy] plugins=/test-data/unit/plugins/add_overloaded_method.py +[case testAddMethodPluginExplicitOverride] +# flags: --python-version 3.12 --config-file tmp/mypy.ini +from typing import override, TypeVar + +T = TypeVar('T', bound=type) + +def inject_foo(t: T) -> T: + # Imitates: + # t.foo_implicit = some_method + return t + +class BaseWithoutFoo: pass + +@inject_foo +class ChildWithFoo(BaseWithoutFoo): pass +reveal_type(ChildWithFoo.foo_implicit) # N: Revealed type is "def (self: __main__.ChildWithFoo)" + +@inject_foo +class SomeWithFoo(ChildWithFoo): pass +reveal_type(SomeWithFoo.foo_implicit) # N: Revealed type is "def (self: __main__.SomeWithFoo)" + +class ExplicitOverride(SomeWithFoo): + @override + def foo_implicit(self) -> None: pass + +class ImplicitOverride(SomeWithFoo): + def foo_implicit(self) -> None: pass # E: Method "foo_implicit" is not using @override but is overriding a method in class "__main__.SomeWithFoo" +[file mypy.ini] +\[mypy] +plugins=/test-data/unit/plugins/add_method.py +enable_error_code = explicit-override +[typing fixtures/typing-override.pyi] + [case testCustomErrorCodePlugin] # flags: --config-file tmp/mypy.ini --show-error-codes def main() -> int: diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test index 924f9c7bb5be0..f26ccd9a48541 100644 --- a/test-data/unit/check-dataclasses.test +++ b/test-data/unit/check-dataclasses.test @@ -2475,3 +2475,17 @@ class Base: class Child(Base): y: int [builtins fixtures/dataclasses.pyi] + + +[case testDataclassInheritanceWorksWithExplicitOverridesAndOrdering] +# flags: --enable-error-code explicit-override +from 
dataclasses import dataclass + +@dataclass(order=True) +class Base: + x: int + +@dataclass(order=True) +class Child(Base): + y: int +[builtins fixtures/dataclasses.pyi] diff --git a/test-data/unit/check-enum.test b/test-data/unit/check-enum.test index e8e65f464eaf1..78a114eda7642 100644 --- a/test-data/unit/check-enum.test +++ b/test-data/unit/check-enum.test @@ -1010,7 +1010,7 @@ _empty: Final = Empty.token def func(x: Union[int, None, Empty] = _empty) -> int: boom = x + 42 # E: Unsupported left operand type for + ("None") \ # E: Unsupported left operand type for + ("Empty") \ - # N: Left operand is of type "Union[int, None, Empty]" + # N: Left operand is of type "Union[int, Empty, None]" if x is _empty: reveal_type(x) # N: Revealed type is "Literal[__main__.Empty.token]" return 0 @@ -1056,7 +1056,7 @@ _empty = Empty.token def func(x: Union[int, None, Empty] = _empty) -> int: boom = x + 42 # E: Unsupported left operand type for + ("None") \ # E: Unsupported left operand type for + ("Empty") \ - # N: Left operand is of type "Union[int, None, Empty]" + # N: Left operand is of type "Union[int, Empty, None]" if x is _empty: reveal_type(x) # N: Revealed type is "Literal[__main__.Empty.token]" return 0 @@ -1084,7 +1084,7 @@ _empty = Empty.token def func(x: Union[int, None, Empty] = _empty) -> int: boom = x + 42 # E: Unsupported left operand type for + ("None") \ # E: Unsupported left operand type for + ("Empty") \ - # N: Left operand is of type "Union[int, None, Empty]" + # N: Left operand is of type "Union[int, Empty, None]" if x is _empty: reveal_type(x) # N: Revealed type is "Literal[__main__.Empty.token]" return 0 @@ -2138,3 +2138,50 @@ elif e == MyEnum.B: else: reveal_type(e) # E: Statement is unreachable [builtins fixtures/dict.pyi] + + +[case testEnumNonMemberSupport] +# flags: --python-version 3.11 +# This was added in 3.11 +from enum import Enum, nonmember + +class My(Enum): + a = 1 + b = 2 + c = nonmember(3) + +reveal_type(My.a) # N: Revealed type is "Literal[__main__.My.a]?" +reveal_type(My.b) # N: Revealed type is "Literal[__main__.My.b]?" +reveal_type(My.c) # N: Revealed type is "builtins.int" + +def accepts_my(my: My): + reveal_type(my.value) # N: Revealed type is "Union[Literal[1]?, Literal[2]?]" + +class Other(Enum): + a = 1 + @nonmember + class Support: + b = 2 + +reveal_type(Other.a) # N: Revealed type is "Literal[__main__.Other.a]?" +reveal_type(Other.Support.b) # N: Revealed type is "builtins.int" +[builtins fixtures/dict.pyi] + + +[case testEnumMemberSupport] +# flags: --python-version 3.11 +# This was added in 3.11 +from enum import Enum, member + +class A(Enum): + x = member(1) + y = 2 + +reveal_type(A.x) # N: Revealed type is "Literal[__main__.A.x]?" +reveal_type(A.x.value) # N: Revealed type is "Literal[1]?" +reveal_type(A.y) # N: Revealed type is "Literal[__main__.A.y]?" +reveal_type(A.y.value) # N: Revealed type is "Literal[2]?" 
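For context, the runtime behaviour that the ``member()``/``nonmember()`` checks model (Python 3.11+), as a hedged sketch that is not part of the test cases themselves:

.. code-block:: python

    from enum import Enum, member, nonmember

    class My(Enum):
        a = 1
        c = nonmember(3)  # stays a plain class attribute
        x = member(4)     # explicitly promoted to an enum member

    assert My.c == 3 and not isinstance(My.c, My)
    assert isinstance(My.x, My) and My.x.value == 4
    assert [m.name for m in My] == ["a", "x"]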
+ +def some_a(a: A): + reveal_type(a.value) # N: Revealed type is "Union[Literal[1]?, Literal[2]?]" +[builtins fixtures/dict.pyi] diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test index 9d49480539e0f..961815b118174 100644 --- a/test-data/unit/check-errorcodes.test +++ b/test-data/unit/check-errorcodes.test @@ -892,6 +892,12 @@ if a: any_or_object: Union[object, Any] if any_or_object: pass + +if (my_foo := Foo()): # E: "__main__.my_foo" has type "Foo" which does not implement __bool__ or __len__ so it could always be true in boolean context [truthy-bool] + pass + +if my_a := (a or Foo()): # E: "__main__.Foo" returns "Foo" which does not implement __bool__ or __len__ so it could always be true in boolean context [truthy-bool] + pass [builtins fixtures/list.pyi] [case testTruthyFunctions] diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test index 04b3f7a131cc9..f9bd60f4dcc8f 100644 --- a/test-data/unit/check-expressions.test +++ b/test-data/unit/check-expressions.test @@ -1640,7 +1640,7 @@ from typing import Generator def g() -> Generator[int, None, None]: x = yield from () # E: Function does not return a value (it only ever returns None) x = yield from (0, 1, 2) # E: Function does not return a value (it only ever returns None) - x = yield from (0, "ERROR") # E: Incompatible types in "yield from" (actual type "object", expected type "int") \ + x = yield from (0, "ERROR") # E: Incompatible types in "yield from" (actual type "Union[int, str]", expected type "int") \ # E: Function does not return a value (it only ever returns None) x = yield from ("ERROR",) # E: Incompatible types in "yield from" (actual type "str", expected type "int") \ # E: Function does not return a value (it only ever returns None) @@ -2449,5 +2449,7 @@ def f() -> int: # E: Missing return statement from typing import TypeVar T = TypeVar("T") x: int -x + T # E: Unsupported operand types for + ("int" and "object") -T() # E: "object" not callable +x + T # E: Unsupported left operand type for + ("int") +T() # E: "TypeVar" not callable +[builtins fixtures/tuple.pyi] +[typing fixtures/typing-full.pyi] diff --git a/test-data/unit/check-final.test b/test-data/unit/check-final.test index 26a0d0782503b..dadf76a283b01 100644 --- a/test-data/unit/check-final.test +++ b/test-data/unit/check-final.test @@ -301,6 +301,50 @@ class P(Protocol): pass [out] +[case testFinalInProtocol] +from typing import Final, Protocol, final + +class P(Protocol): + var1 : Final[int] = 0 # E: Protocol member cannot be final + + @final # E: Protocol member cannot be final + def meth1(self) -> None: + var2: Final = 0 + + def meth2(self) -> None: + var3: Final = 0 + + def meth3(self) -> None: + class Inner: + var3: Final = 0 # OK + + @final + def inner(self) -> None: ... + + class Inner: + var3: Final = 0 # OK + + @final + def inner(self) -> None: ... + +[out] + +[case testFinalWithClassVarInProtocol] +from typing import Protocol, Final, final, ClassVar + +class P(Protocol): + var1 : Final[ClassVar[int]] = 0 # E: Variable should not be annotated with both ClassVar and Final + var2: ClassVar[int] = 1 + + @final # E: Protocol member cannot be final + def meth1(self) -> None: + ... 
+ + def meth2(self) -> None: + var3: Final[ClassVar[int]] = 0 # E: Variable should not be annotated with both ClassVar and Final # E: ClassVar can only be used for assignments in class body + +[out] + [case testFinalNotInLoops] from typing import Final diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test index fe01590c6c71c..93540e203c36c 100644 --- a/test-data/unit/check-functions.test +++ b/test-data/unit/check-functions.test @@ -40,11 +40,10 @@ class B(A): class C(A): def f(self, *, b: int, a: str) -> None: pass # Fail [out] -main:10: error: Signature of "f" incompatible with supertype "A" -main:10: note: Superclass: -main:10: note: def f(self, *, a: int, b: str) -> None -main:10: note: Subclass: -main:10: note: def f(self, *, b: int, a: str) -> None +main:10: error: Argument 1 of "f" is incompatible with supertype "A"; supertype defines the argument type as "str" +main:10: note: This violates the Liskov substitution principle +main:10: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides +main:10: error: Argument 2 of "f" is incompatible with supertype "A"; supertype defines the argument type as "int" [case testPositionalOverridingArgumentNameInsensitivity] import typing @@ -1780,10 +1779,10 @@ def Arg(x, y): pass F = Callable[[Arg(int, 'x')], int] # E: Invalid argument constructor "__main__.Arg" [case testCallableParsingFromExpr] - from typing import Callable, List from mypy_extensions import Arg, VarArg, KwArg import mypy_extensions +import types # Needed for type checking def WrongArg(x, y): return y # Note that for this test, the 'Value of type "int" is not indexable' errors are silly, @@ -1800,11 +1799,10 @@ L = Callable[[Arg(name='x', type=int)], int] # ok # I have commented out the following test because I don't know how to expect the "defined here" note part of the error. # M = Callable[[Arg(gnome='x', type=int)], int] E: Invalid type alias: expression is not a valid type E: Unexpected keyword argument "gnome" for "Arg" N = Callable[[Arg(name=None, type=int)], int] # ok -O = Callable[[List[Arg(int)]], int] # E: Invalid type alias: expression is not a valid type # E: Value of type "int" is not indexable # E: Type expected within [...] # E: The type "Type[List[Any]]" is not generic and not indexable +O = Callable[[List[Arg(int)]], int] # E: Invalid type alias: expression is not a valid type # E: Value of type "int" is not indexable # E: Type expected within [...] P = Callable[[mypy_extensions.VarArg(int)], int] # ok Q = Callable[[Arg(int, type=int)], int] # E: Invalid type alias: expression is not a valid type # E: Value of type "int" is not indexable # E: "Arg" gets multiple values for keyword argument "type" R = Callable[[Arg(int, 'x', name='y')], int] # E: Invalid type alias: expression is not a valid type # E: Value of type "int" is not indexable # E: "Arg" gets multiple values for keyword argument "name" - [builtins fixtures/dict.pyi] [case testCallableParsing] @@ -3240,3 +3238,147 @@ class Base: class Derived(Base): def foo(self): # E: Cannot override final attribute "foo" (previously declared in base class "Base") pass + +[case testTypeVarIdClashPolymorphic] +from typing import Callable, Generic, TypeVar + +A = TypeVar("A") +B = TypeVar("B") + +class Gen(Generic[A]): ... + +def id_(x: A) -> A: ... +def f(x: Gen[A], y: A) -> Gen[Gen[A]]: ... +def g(x: Gen[A], id_: Callable[[B], B], f: Callable[[A, B], Gen[A]]) -> A: ... 
+ +def test(x: Gen[Gen[A]]) -> Gen[A]: + return g(x, id_, f) # Technically OK + +x: Gen[Gen[int]] +reveal_type(g(x, id_, f)) # N: Revealed type is "__main__.Gen[builtins.int]" + +def h(x: A, y: A) -> A: ... +def gn(id_: Callable[[B], B], step: Callable[[A, B], A]) -> A: ... + +def fn(x: A) -> A: + return gn(id_, h) # Technically OK + +[case testTypeVarIdsNested] +from typing import Callable, TypeVar + +A = TypeVar("A") +B = TypeVar("B") + +def f(x: Callable[[A], A]) -> Callable[[B], B]: + def g(x: B) -> B: ... + return g + +reveal_type(f(f)) # N: Revealed type is "def [B] (B`1) -> B`1" +reveal_type(f(f)(f)) # N: Revealed type is "def [A] (x: def (A`-1) -> A`-1) -> def [B] (B`-2) -> B`-2" + +[case testGenericUnionFunctionJoin] +from typing import TypeVar, Union + +T = TypeVar("T") +S = TypeVar("S") + +def f(x: T, y: S) -> Union[T, S]: ... +def g(x: T, y: S) -> Union[T, S]: ... + +x = [f, g] +reveal_type(x) # N: Revealed type is "builtins.list[def [T, S] (x: T`4, y: S`5) -> Union[T`4, S`5]]" +[builtins fixtures/list.pyi] + +[case testTypeVariableClashErrorMessage] +from typing import TypeVar + +T = TypeVar("T") + +class C: # Note: Generic[T] missing + def bad_idea(self, x: T) -> None: + self.x = x + + def nope(self, x: T) -> None: + self.x = x # E: Incompatible types in assignment (expression has type "T@nope", variable has type "T@bad_idea") + +[case testNoCrashOnBadCallablePropertyOverride] +from typing import Callable, Union + +class C: ... +class D: ... + +A = Callable[[C], None] +B = Callable[[D], None] + +class Foo: + @property + def method(self) -> Callable[[int, Union[A, B]], None]: + ... + +class Bar(Foo): + @property + def method(self) -> Callable[[int, A], None]: # E: Argument 2 of "method" is incompatible with supertype "Foo"; supertype defines the argument type as "Union[Callable[[C], None], Callable[[D], None]]" \ + # N: This violates the Liskov substitution principle \ + # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides + ... +[builtins fixtures/property.pyi] + +[case testNoCrashOnUnpackOverride] +from typing import Unpack +from typing_extensions import TypedDict + +class Params(TypedDict): + x: int + y: str + +class Other(TypedDict): + x: int + y: int + +class B: + def meth(self, **kwargs: Unpack[Params]) -> None: + ... +class C(B): + def meth(self, **kwargs: Unpack[Other]) -> None: # E: Signature of "meth" incompatible with supertype "B" \ + # N: Superclass: \ + # N: def meth(*, x: int, y: str) -> None \ + # N: Subclass: \ + # N: def meth(*, x: int, y: int) -> None + + ... +[builtins fixtures/tuple.pyi] + +[case testOverrideErrorLocationNamed] +class B: + def meth( + self, *, + x: int, + y: str, + ) -> None: + ... +class C(B): + def meth( + self, *, + y: int, # E: Argument 1 of "meth" is incompatible with supertype "B"; supertype defines the argument type as "str" \ + # N: This violates the Liskov substitution principle \ + # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides + x: int, + ) -> None: + ... +[builtins fixtures/tuple.pyi] + +[case testLambdaAlwaysAllowed] +# flags: --disallow-untyped-calls +from typing import Callable, Optional + +def func() -> Optional[str]: ... 
+var: Optional[str] + +factory: Callable[[], Optional[str]] +for factory in ( + lambda: var, + func, +): + reveal_type(factory) # N: Revealed type is "def () -> Union[builtins.str, None]" + var = factory() +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-functools.test b/test-data/unit/check-functools.test index 38083ad98f213..e4b3e4cffdc11 100644 --- a/test-data/unit/check-functools.test +++ b/test-data/unit/check-functools.test @@ -183,8 +183,8 @@ p3(1, 3) # E: Too many positional arguments for "foo" \ # E: Argument 2 to "foo" has incompatible type "int"; expected "str" functools.partial(foo, "a") # E: Argument 1 to "foo" has incompatible type "str"; expected "int" -functools.partial(foo, b=1) # E: Argument 1 to "foo" has incompatible type "int"; expected "str" -functools.partial(foo, a=1, b=2, c=3) # E: Argument 2 to "foo" has incompatible type "int"; expected "str" +functools.partial(foo, b=1) # E: Argument "b" to "foo" has incompatible type "int"; expected "str" +functools.partial(foo, a=1, b=2, c=3) # E: Argument "b" to "foo" has incompatible type "int"; expected "str" functools.partial(1) # E: "int" not callable \ # E: Argument 1 to "partial" has incompatible type "int"; expected "Callable[..., Never]" [builtins fixtures/dict.pyi] @@ -347,6 +347,37 @@ reveal_type(functools.partial(fn3, 2)()) # E: "str" not callable \ # E: Argument 1 to "partial" has incompatible type "Union[Callable[[int], int], str]"; expected "Callable[..., int]" [builtins fixtures/tuple.pyi] +[case testFunctoolsPartialExplicitType] +from functools import partial +from typing import Type, TypeVar, Callable + +T = TypeVar("T") +def generic(string: str, integer: int, resulting_type: Type[T]) -> T: ... + +p: partial[str] = partial(generic, resulting_type=str) +q: partial[bool] = partial(generic, resulting_type=str) # E: Argument "resulting_type" to "generic" has incompatible type "Type[str]"; expected "Type[bool]" + +pc: Callable[..., str] = partial(generic, resulting_type=str) +qc: Callable[..., bool] = partial(generic, resulting_type=str) # E: Incompatible types in assignment (expression has type "partial[str]", variable has type "Callable[..., bool]") \ + # N: "partial[str].__call__" has type "Callable[[VarArg(Any), KwArg(Any)], str]" +[builtins fixtures/tuple.pyi] + +[case testFunctoolsPartialNestedPartial] +from functools import partial +from typing import Any + +def foo(x: int) -> int: ... +p = partial(partial, foo) +reveal_type(p()(1)) # N: Revealed type is "builtins.int" +p()("no") # E: Argument 1 to "foo" has incompatible type "str"; expected "int" + +q = partial(partial, partial, foo) +q()()("no") # E: Argument 1 to "foo" has incompatible type "str"; expected "int" + +r = partial(partial, foo, 1) +reveal_type(r()()) # N: Revealed type is "builtins.int" +[builtins fixtures/tuple.pyi] + [case testFunctoolsPartialTypeObject] import functools from typing import Type, Generic, TypeVar @@ -372,3 +403,68 @@ def foo(cls3: Type[B[T]]): reveal_type(functools.partial(cls3, 2)()) # N: Revealed type is "__main__.B[T`-1]" \ # E: Argument 1 to "B" has incompatible type "int"; expected "T" [builtins fixtures/tuple.pyi] + +[case testFunctoolsPartialTypedDictUnpack] +from typing_extensions import TypedDict, Unpack +from functools import partial + +class Data(TypedDict, total=False): + x: int + +def f(**kwargs: Unpack[Data]) -> None: ... 
+def g(**kwargs: Unpack[Data]) -> None: + partial(f, **kwargs)() + +class MoreData(TypedDict, total=False): + x: int + y: int + +def f_more(**kwargs: Unpack[MoreData]) -> None: ... +def g_more(**kwargs: Unpack[MoreData]) -> None: + partial(f_more, **kwargs)() + +class Good(TypedDict, total=False): + y: int +class Bad(TypedDict, total=False): + y: str + +def h(**kwargs: Unpack[Data]) -> None: + bad: Bad + partial(f_more, **kwargs)(**bad) # E: Argument "y" to "f_more" has incompatible type "str"; expected "int" + good: Good + partial(f_more, **kwargs)(**good) +[builtins fixtures/dict.pyi] + +[case testFunctoolsPartialNestedGeneric] +from functools import partial +from typing import Generic, TypeVar, List + +T = TypeVar("T") +def get(n: int, args: List[T]) -> T: ... +first = partial(get, 0) + +x: List[str] +reveal_type(first(x)) # N: Revealed type is "builtins.str" +reveal_type(first([1])) # N: Revealed type is "builtins.int" + +first_kw = partial(get, n=0) +reveal_type(first_kw(args=[1])) # N: Revealed type is "builtins.int" + +# TODO: this is indeed invalid, but the error is incomprehensible. +first_kw([1]) # E: Too many positional arguments for "get" \ + # E: Too few arguments for "get" \ + # E: Argument 1 to "get" has incompatible type "List[int]"; expected "int" +[builtins fixtures/list.pyi] + +[case testFunctoolsPartialClassObjectMatchingPartial] +from functools import partial + +class A: + def __init__(self, var: int, b: int, c: int) -> None: ... + +p = partial(A, 1) +reveal_type(p) # N: Revealed type is "functools.partial[__main__.A]" +p(1, "no") # E: Argument 2 to "A" has incompatible type "str"; expected "int" + +q: partial[A] = partial(A, 1) # OK +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-generic-subtyping.test b/test-data/unit/check-generic-subtyping.test index fd40f128ff4a5..90180e0f83f6a 100644 --- a/test-data/unit/check-generic-subtyping.test +++ b/test-data/unit/check-generic-subtyping.test @@ -306,7 +306,7 @@ main:14: error: Signature of "f" incompatible with supertype "A" main:14: note: Superclass: main:14: note: def [S] f(self, x: int, y: S) -> None main:14: note: Subclass: -main:14: note: def [T1 <: str, S] f(self, x: T1, y: S) -> None +main:14: note: def [T1: str, S] f(self, x: T1, y: S) -> None -- Inheritance from generic types with implicit dynamic supertype -- -------------------------------------------------------------- diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test index b1d1ff3f46a11..b8cc0422b749f 100644 --- a/test-data/unit/check-generics.test +++ b/test-data/unit/check-generics.test @@ -454,11 +454,13 @@ A[int, str, int]() # E: Type application has too many types (2 expected) [out] [case testInvalidTypeApplicationType] +import types a: A class A: pass a[A]() # E: Value of type "A" is not indexable A[A]() # E: The type "Type[A]" is not generic and not indexable -[out] +[builtins fixtures/tuple.pyi] +[typing fixtures/typing-full.pyi] [case testTypeApplicationArgTypes] from typing import TypeVar, Generic @@ -513,8 +515,9 @@ Alias[int]("a") # E: Argument 1 to "Node" has incompatible type "str"; expected [out] [case testTypeApplicationCrash] +import types type[int] # this was crashing, see #2302 (comment) # E: The type "Type[type]" is not generic and not indexable -[out] +[builtins fixtures/tuple.pyi] -- Generic type aliases @@ -624,6 +627,7 @@ reveal_type(y) X = T # Error [builtins fixtures/list.pyi] +[typing fixtures/typing-full.pyi] [out] main:9:5: error: "Node" expects 2 type arguments, but 1 given 
main:11:5: error: "Node" expects 2 type arguments, but 3 given @@ -1608,17 +1612,17 @@ if int(): if int(): y1 = f3 if int(): - y1 = f4 # E: Incompatible types in assignment (expression has type "Callable[[int], A]", variable has type "Callable[[A], A]") + y1 = f4 # E: Incompatible types in assignment (expression has type "Callable[[int], A@f4]", variable has type "Callable[[A@f1], A@f1]") y2 = f2 if int(): y2 = f2 if int(): - y2 = f1 # E: Incompatible types in assignment (expression has type "Callable[[A], A]", variable has type "Callable[[A], B]") + y2 = f1 # E: Incompatible types in assignment (expression has type "Callable[[A@f1], A@f1]", variable has type "Callable[[A@f2], B]") if int(): - y2 = f3 # E: Incompatible types in assignment (expression has type "Callable[[B], B]", variable has type "Callable[[A], B]") + y2 = f3 # E: Incompatible types in assignment (expression has type "Callable[[B@f3], B@f3]", variable has type "Callable[[A], B@f2]") if int(): - y2 = f4 # E: Incompatible types in assignment (expression has type "Callable[[int], A]", variable has type "Callable[[A], B]") + y2 = f4 # E: Incompatible types in assignment (expression has type "Callable[[int], A@f4]", variable has type "Callable[[A@f2], B]") y3 = f3 if int(): @@ -1634,7 +1638,7 @@ y4 = f4 if int(): y4 = f4 if int(): - y4 = f1 # E: Incompatible types in assignment (expression has type "Callable[[A], A]", variable has type "Callable[[int], A]") + y4 = f1 # E: Incompatible types in assignment (expression has type "Callable[[A@f1], A@f1]", variable has type "Callable[[int], A@f4]") if int(): y4 = f2 if int(): @@ -1655,26 +1659,26 @@ def outer(t: T) -> None: y1 = f1 if int(): y1 = f2 - y1 = f3 # E: Incompatible types in assignment (expression has type "Callable[[T], A]", variable has type "Callable[[A], A]") - y1 = f4 # E: Incompatible types in assignment (expression has type "Callable[[A], T]", variable has type "Callable[[A], A]") + y1 = f3 # E: Incompatible types in assignment (expression has type "Callable[[T], A@f3]", variable has type "Callable[[A@f1], A@f1]") + y1 = f4 # E: Incompatible types in assignment (expression has type "Callable[[A@f4], T]", variable has type "Callable[[A@f1], A@f1]") y1 = f5 # E: Incompatible types in assignment (expression has type "Callable[[T], T]", variable has type "Callable[[A], A]") y2 = f2 if int(): - y2 = f1 # E: Incompatible types in assignment (expression has type "Callable[[A], A]", variable has type "Callable[[A], B]") + y2 = f1 # E: Incompatible types in assignment (expression has type "Callable[[A@f1], A@f1]", variable has type "Callable[[A@f2], B]") y3 = f3 if int(): - y3 = f1 # E: Incompatible types in assignment (expression has type "Callable[[A], A]", variable has type "Callable[[T], A]") + y3 = f1 # E: Incompatible types in assignment (expression has type "Callable[[A@f1], A@f1]", variable has type "Callable[[T], A@f3]") y3 = f2 - y3 = f4 # E: Incompatible types in assignment (expression has type "Callable[[A], T]", variable has type "Callable[[T], A]") + y3 = f4 # E: Incompatible types in assignment (expression has type "Callable[[A@f4], T]", variable has type "Callable[[T], A@f3]") y3 = f5 # E: Incompatible types in assignment (expression has type "Callable[[T], T]", variable has type "Callable[[T], A]") y4 = f4 if int(): - y4 = f1 # E: Incompatible types in assignment (expression has type "Callable[[A], A]", variable has type "Callable[[A], T]") + y4 = f1 # E: Incompatible types in assignment (expression has type "Callable[[A@f1], A@f1]", variable has type 
"Callable[[A@f4], T]") y4 = f2 - y4 = f3 # E: Incompatible types in assignment (expression has type "Callable[[T], A]", variable has type "Callable[[A], T]") + y4 = f3 # E: Incompatible types in assignment (expression has type "Callable[[T], A@f3]", variable has type "Callable[[A@f4], T]") y4 = f5 # E: Incompatible types in assignment (expression has type "Callable[[T], T]", variable has type "Callable[[A], T]") y5 = f5 @@ -1683,7 +1687,6 @@ def outer(t: T) -> None: y5 = f2 y5 = f3 y5 = f4 -[out] [case testSubtypingWithGenericFunctionUsingTypevarWithValues] from typing import TypeVar, Callable @@ -2928,8 +2931,8 @@ def mix(fs: List[Callable[[S], T]]) -> Callable[[S], List[T]]: def id(__x: U) -> U: ... fs = [id, id, id] -reveal_type(mix(fs)) # N: Revealed type is "def [S] (S`3) -> builtins.list[S`3]" -reveal_type(mix([id, id, id])) # N: Revealed type is "def [S] (S`5) -> builtins.list[S`5]" +reveal_type(mix(fs)) # N: Revealed type is "def [S] (S`11) -> builtins.list[S`11]" +reveal_type(mix([id, id, id])) # N: Revealed type is "def [S] (S`13) -> builtins.list[S`13]" [builtins fixtures/list.pyi] [case testInferenceAgainstGenericCurry] @@ -3100,13 +3103,13 @@ def dec4_bound(f: Callable[[I], List[T]]) -> Callable[[I], T]: ... reveal_type(dec1(lambda x: x)) # N: Revealed type is "def [T] (T`3) -> builtins.list[T`3]" -reveal_type(dec2(lambda x: x)) # N: Revealed type is "def [S] (S`4) -> builtins.list[S`4]" -reveal_type(dec3(lambda x: x[0])) # N: Revealed type is "def [S] (S`6) -> S`6" -reveal_type(dec4(lambda x: [x])) # N: Revealed type is "def [S] (S`9) -> S`9" +reveal_type(dec2(lambda x: x)) # N: Revealed type is "def [S] (S`5) -> builtins.list[S`5]" +reveal_type(dec3(lambda x: x[0])) # N: Revealed type is "def [S] (S`8) -> S`8" +reveal_type(dec4(lambda x: [x])) # N: Revealed type is "def [S] (S`12) -> S`12" reveal_type(dec1(lambda x: 1)) # N: Revealed type is "def (builtins.int) -> builtins.list[builtins.int]" reveal_type(dec5(lambda x: x)) # N: Revealed type is "def (builtins.int) -> builtins.list[builtins.int]" -reveal_type(dec3(lambda x: x)) # N: Revealed type is "def [S] (S`16) -> builtins.list[S`16]" -reveal_type(dec4(lambda x: x)) # N: Revealed type is "def [T] (builtins.list[T`19]) -> T`19" +reveal_type(dec3(lambda x: x)) # N: Revealed type is "def [S] (S`20) -> builtins.list[S`20]" +reveal_type(dec4(lambda x: x)) # N: Revealed type is "def [T] (builtins.list[T`24]) -> T`24" dec4_bound(lambda x: x) # E: Value of type variable "I" of "dec4_bound" cannot be "List[T]" [builtins fixtures/list.pyi] @@ -3186,7 +3189,7 @@ reveal_type(dec(id)) # N: Revealed type is "def [T] (T`3) -> T`3" reveal_type(dec(either)) # N: Revealed type is "def [T] (T`6, x: T`6) -> T`6" reveal_type(dec(pair)) # N: Revealed type is "def [T, U] (T`9, x: U`-1) -> Tuple[T`9, U`-1]" # This is counter-intuitive but looks correct, dec matches itself only if P can be empty -reveal_type(dec(dec)) # N: Revealed type is "def [T, S] (T`12, f: def () -> def (T`12) -> S`13) -> S`13" +reveal_type(dec(dec)) # N: Revealed type is "def [T, S] (T`13, f: def () -> def (T`13) -> S`14) -> S`14" [builtins fixtures/list.pyi] [case testInferenceAgainstGenericParamSpecVsParamSpec] @@ -3384,7 +3387,7 @@ reveal_type(dec(id)) # N: Revealed type is "def [T] (T`3) -> T`3" reveal_type(dec(either)) # N: Revealed type is "def [T] (T`6, T`6) -> T`6" reveal_type(dec(pair)) # N: Revealed type is "def [T, U] (T`9, U`-1) -> Tuple[T`9, U`-1]" # This is counter-intuitive but looks correct, dec matches itself only if Ts is empty -reveal_type(dec(dec)) # N: 
Revealed type is "def [T, S] (T`12, def () -> def (T`12) -> S`13) -> S`13" +reveal_type(dec(dec)) # N: Revealed type is "def [T, S] (T`13, def () -> def (T`13) -> S`14) -> S`14" [builtins fixtures/list.pyi] [case testInferenceAgainstGenericVariadicVsVariadic] @@ -3443,3 +3446,104 @@ reveal_type(dec(g)) # N: Revealed type is "def (builtins.int) -> __main__.Foo[b h: Callable[[Unpack[Us]], Foo[int]] reveal_type(dec(h)) # N: Revealed type is "def (builtins.int) -> __main__.Foo[builtins.int]" [builtins fixtures/list.pyi] + +[case testTypeApplicationGenericConstructor] +from typing import Generic, TypeVar, Callable + +T = TypeVar("T") +S = TypeVar("S") +class C(Generic[T]): + def __init__(self, f: Callable[[S], T], x: S) -> None: + self.x = f(x) + +reveal_type(C[int]) # N: Revealed type is "def [S] (f: def (S`-1) -> builtins.int, x: S`-1) -> __main__.C[builtins.int]" +Alias = C[int] +C[int, str] # E: Type application has too many types (1 expected) + +[case testHigherOrderGenericPartial] +from typing import TypeVar, Callable + +T = TypeVar("T") +S = TypeVar("S") +U = TypeVar("U") +def apply(f: Callable[[T], S], x: T) -> S: ... +def id(x: U) -> U: ... + +A1 = TypeVar("A1") +A2 = TypeVar("A2") +R = TypeVar("R") +def fake_partial(fun: Callable[[A1, A2], R], arg: A1) -> Callable[[A2], R]: ... + +f_pid = fake_partial(apply, id) +reveal_type(f_pid) # N: Revealed type is "def [A2] (A2`2) -> A2`2" +reveal_type(f_pid(1)) # N: Revealed type is "builtins.int" + +[case testInvalidTypeVarParametersConcrete] +from typing import Callable, Generic, ParamSpec, Protocol, TypeVar, overload + +P = ParamSpec('P') +P2 = ParamSpec('P2') +R = TypeVar('R') +R2 = TypeVar('R2') + +class C(Generic[P, R, P2, R2]): ... + +class Proto(Protocol[P, R]): + @overload + def __call__(self, f: Callable[P2, R2]) -> C[P2, R2, ..., R]: ... + @overload + def __call__(self, **kwargs) -> C[P, R, ..., [int, str]]: ... # E: Cannot use "[int, str]" for regular type variable, only for ParamSpec +[builtins fixtures/tuple.pyi] + +[case testInvalidTypeVarParametersArbitrary] +from typing import Callable, Generic, ParamSpec, Protocol, TypeVar, overload + +P = ParamSpec('P') +P2 = ParamSpec('P2') +R = TypeVar('R') +R2 = TypeVar('R2') + +class C(Generic[P, R, P2, R2]): ... + +class Proto(Protocol[P, R]): + @overload + def __call__(self, f: Callable[P2, R2]) -> C[P2, R2, ..., R]: ... + @overload + def __call__(self, **kwargs) -> C[P, R, ..., ...]: ... # E: Cannot use "[VarArg(Any), KwArg(Any)]" for regular type variable, only for ParamSpec +[builtins fixtures/tuple.pyi] + +[case testGenericOverloadOverlapUnion] +from typing import TypeVar, overload, Union, Generic + +K = TypeVar("K") +V = TypeVar("V") +T = TypeVar("T") + +class C(Generic[K, V]): + @overload + def pop(self, key: K) -> V: ... + @overload + def pop(self, key: K, default: Union[V, T] = ...) -> Union[V, T]: ... + def pop(self, key: K, default: Union[V, T] = ...) -> Union[V, T]: + ... + +[case testOverloadedGenericInit] +from typing import TypeVar, overload, Union, Generic + +T = TypeVar("T") +S = TypeVar("S") + +class Int(Generic[T]): ... +class Str(Generic[T]): ... + +class C(Generic[T]): + @overload + def __init__(self: C[Int[S]], x: int, y: S) -> None: ... + @overload + def __init__(self: C[Str[S]], x: str, y: S) -> None: ... + def __init__(self, x, y) -> None: ... 
+ +def foo(x: T): + reveal_type(C) # N: Revealed type is "Overload(def [T, S] (x: builtins.int, y: S`-1) -> __main__.C[__main__.Int[S`-1]], def [T, S] (x: builtins.str, y: S`-1) -> __main__.C[__main__.Str[S`-1]])" + reveal_type(C(0, x)) # N: Revealed type is "__main__.C[__main__.Int[T`-1]]" + reveal_type(C("yes", x)) # N: Revealed type is "__main__.C[__main__.Str[T`-1]]" diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index ead896b8e458c..24292bce3e21d 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -1833,6 +1833,21 @@ main:3: note: Revealed type is "builtins.int" main:3: note: Revealed type is "Any" +[case testIncrementalIgnoreErrors] +# flags: --config-file tmp/mypy.ini +import a +[file a.py] +import module_that_will_be_deleted +[file module_that_will_be_deleted.py] + +[file mypy.ini] +\[mypy] +\[mypy-a] +ignore_errors = True +[delete module_that_will_be_deleted.py.2] +[out1] +[out2] + [case testIncrementalNamedTupleInMethod] from ntcrash import nope [file ntcrash.py] diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index 08b53ab169724..fcd03f8efe014 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -988,7 +988,7 @@ a = k2 if int(): a = k2 if int(): - a = k1 # E: Incompatible types in assignment (expression has type "Callable[[int, List[T]], List[Union[T, int]]]", variable has type "Callable[[S, List[T]], List[Union[T, int]]]") + a = k1 # E: Incompatible types in assignment (expression has type "Callable[[int, List[T@k1]], List[Union[T@k1, int]]]", variable has type "Callable[[S, List[T@k2]], List[Union[T@k2, int]]]") b = k1 if int(): b = k1 diff --git a/test-data/unit/check-namedtuple.test b/test-data/unit/check-namedtuple.test index a0d984b302799..e9d156754d9cc 100644 --- a/test-data/unit/check-namedtuple.test +++ b/test-data/unit/check-namedtuple.test @@ -1249,7 +1249,7 @@ nti: NT[int] reveal_type(nti * x) # N: Revealed type is "builtins.tuple[builtins.int, ...]" nts: NT[str] -reveal_type(nts * x) # N: Revealed type is "builtins.tuple[builtins.object, ...]" +reveal_type(nts * x) # N: Revealed type is "builtins.tuple[Union[builtins.int, builtins.str], ...]" [builtins fixtures/tuple.pyi] [typing fixtures/typing-namedtuple.pyi] @@ -1310,9 +1310,9 @@ reveal_type(foo(nti, nts)) # N: Revealed type is "Tuple[builtins.int, builtins. 
reveal_type(foo(nts, nti)) # N: Revealed type is "Tuple[builtins.int, builtins.object, fallback=__main__.NT[builtins.object]]" reveal_type(foo(nti, x)) # N: Revealed type is "builtins.tuple[builtins.int, ...]" -reveal_type(foo(nts, x)) # N: Revealed type is "builtins.tuple[builtins.object, ...]" +reveal_type(foo(nts, x)) # N: Revealed type is "builtins.tuple[Union[builtins.int, builtins.str], ...]" reveal_type(foo(x, nti)) # N: Revealed type is "builtins.tuple[builtins.int, ...]" -reveal_type(foo(x, nts)) # N: Revealed type is "builtins.tuple[builtins.object, ...]" +reveal_type(foo(x, nts)) # N: Revealed type is "builtins.tuple[Union[builtins.int, builtins.str], ...]" [builtins fixtures/tuple.pyi] [typing fixtures/typing-namedtuple.pyi] @@ -1412,3 +1412,14 @@ A(x=0).__replace__(x="asdf") # E: Argument "x" to "__replace__" of "A" has inco A(x=0).__replace__(y=1) # E: Unexpected keyword argument "y" for "__replace__" of "A" [builtins fixtures/tuple.pyi] [typing fixtures/typing-namedtuple.pyi] + +[case testUnpackSelfNamedTuple] +import typing + +class Foo(typing.NamedTuple): + bar: int + def baz(self: typing.Self) -> None: + x, = self + reveal_type(x) # N: Revealed type is "builtins.int" +[builtins fixtures/tuple.pyi] +[typing fixtures/typing-namedtuple.pyi] diff --git a/test-data/unit/check-newsemanal.test b/test-data/unit/check-newsemanal.test index 7cbed5637c3a0..511c7b003015f 100644 --- a/test-data/unit/check-newsemanal.test +++ b/test-data/unit/check-newsemanal.test @@ -1947,7 +1947,7 @@ class NTStr(NamedTuple): y: str t1: T -reveal_type(t1.__iter__) # N: Revealed type is "def () -> typing.Iterator[__main__.A]" +reveal_type(t1.__iter__) # N: Revealed type is "def () -> typing.Iterator[Union[__main__.B, __main__.C]]" t2: NTInt reveal_type(t2.__iter__) # N: Revealed type is "def () -> typing.Iterator[builtins.int]" @@ -1960,7 +1960,6 @@ t: Union[Tuple[int, int], Tuple[str, str]] for x in t: reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.str]" [builtins fixtures/for.pyi] -[out] [case testNewAnalyzerFallbackUpperBoundCheckAndFallbacks] from typing import TypeVar, Generic, Tuple @@ -1973,10 +1972,9 @@ S = TypeVar('S', bound='Tuple[G[A], ...]') class GG(Generic[S]): pass -g: GG[Tuple[G[B], G[C]]] \ - # E: Type argument "Tuple[G[B], G[C]]" of "GG" must be a subtype of "Tuple[G[A], ...]" \ - # E: Type argument "B" of "G" must be a subtype of "A" \ - # E: Type argument "C" of "G" must be a subtype of "A" +g: GG[Tuple[G[B], G[C]]] # E: Type argument "Tuple[G[B], G[C]]" of "GG" must be a subtype of "Tuple[G[A], ...]" \ + # E: Type argument "B" of "G" must be a subtype of "A" \ + # E: Type argument "C" of "G" must be a subtype of "A" T = TypeVar('T', bound=A, covariant=True) @@ -1984,7 +1982,7 @@ class G(Generic[T]): pass t: Tuple[G[B], G[C]] # E: Type argument "B" of "G" must be a subtype of "A" \ # E: Type argument "C" of "G" must be a subtype of "A" -reveal_type(t.__iter__) # N: Revealed type is "def () -> typing.Iterator[builtins.object]" +reveal_type(t.__iter__) # N: Revealed type is "def () -> typing.Iterator[__main__.G[__main__.B]]" [builtins fixtures/tuple.pyi] [case testNewAnalyzerClassKeywordsForward] @@ -2184,8 +2182,7 @@ from typing import TypeVar, Generic, Any T = TypeVar('T', bound='B[Any]') # The "int" error is because of typing fixture. T = TypeVar('T', bound='C') # E: Cannot redefine "T" as a type variable \ - # E: Invalid assignment target \ - # E: "int" not callable + # E: Invalid assignment target class B(Generic[T]): x: T @@ -2194,6 +2191,8 @@ class C: ... 
x: B[int] # E: Type argument "int" of "B" must be a subtype of "B[Any]" y: B[B[Any]] reveal_type(y.x) # N: Revealed type is "__main__.B[Any]" +[builtins fixtures/tuple.pyi] +[typing fixtures/typing-full.pyi] [case testNewAnalyzerDuplicateTypeVarImportCycle] # flags: --disable-error-code used-before-def @@ -2216,12 +2215,13 @@ class C: ... x: B[int] y: B[B[Any]] reveal_type(y.x) +[builtins fixtures/tuple.pyi] +[typing fixtures/typing-full.pyi] [out] tmp/b.py:8: error: Type argument "int" of "B" must be a subtype of "B[Any]" tmp/b.py:10: note: Revealed type is "b.B[Any]" tmp/a.py:5: error: Cannot redefine "T" as a type variable tmp/a.py:5: error: Invalid assignment target -tmp/a.py:5: error: "int" not callable [case testNewAnalyzerDuplicateTypeVarImportCycleWithAliases] # flags: --disable-error-code used-before-def diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test index 7bca5cc7b5080..48d5996b226f3 100644 --- a/test-data/unit/check-overloading.test +++ b/test-data/unit/check-overloading.test @@ -373,7 +373,8 @@ def foo(t, s): pass class Wrapper(Generic[T]): @overload - def foo(self, t: List[T], s: T) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types + def foo(self, t: List[T], s: T) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types \ + # N: Flipping the order of overloads will fix this error @overload def foo(self, t: T, s: T) -> str: ... def foo(self, t, s): pass @@ -384,7 +385,8 @@ class Dummy(Generic[T]): pass # cause the constraint solver to not infer T = object like it did in the # first example? @overload -def bar(d: Dummy[T], t: List[T], s: T) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def bar(d: Dummy[T], t: List[T], s: T) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types \ + # N: Flipping the order of overloads will fix this error @overload def bar(d: Dummy[T], t: T, s: T) -> str: ... def bar(d: Dummy[T], t, s): pass @@ -1274,7 +1276,7 @@ f('x')() # E: "str" not callable f(1)() # E: "bool" not callable f(1.1) # E: No overload variant of "f" matches argument type "float" \ # N: Possible overload variants: \ - # N: def [T <: str] f(x: T) -> T \ + # N: def [T: str] f(x: T) -> T \ # N: def f(x: int) -> bool f(mystr())() # E: "mystr" not callable [builtins fixtures/primitives.pyi] @@ -1296,8 +1298,8 @@ def g(x: U, y: V) -> None: f(x)() # E: "mystr" not callable f(y) # E: No overload variant of "f" matches argument type "V" \ # N: Possible overload variants: \ - # N: def [T <: str] f(x: T) -> T \ - # N: def [T <: str] f(x: List[T]) -> None + # N: def [T: str] f(x: T) -> T \ + # N: def [T: str] f(x: List[T]) -> None a = f([x]) reveal_type(a) # N: Revealed type is "None" f([y]) # E: Value of type variable "T" of "f" cannot be "V" @@ -1325,8 +1327,9 @@ def h(x: Sequence[str]) -> int: pass @overload def h(x: Sequence[T]) -> None: pass # E: Overloaded function signature 2 will never be matched: signature 1's parameter type(s) are the same or broader +# Safety of this highly depends on the implementation, so we lean towards being silent. 
@overload -def i(x: List[str]) -> int: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def i(x: List[str]) -> int: pass @overload def i(x: List[T]) -> None: pass [builtins fixtures/list.pyi] @@ -1348,7 +1351,7 @@ f(b'1')() # E: "str" not callable f(1.0) # E: No overload variant of "f" matches argument type "float" \ # N: Possible overload variants: \ # N: def f(x: int) -> int \ - # N: def [AnyStr in (bytes, str)] f(x: AnyStr) -> str + # N: def [AnyStr: (bytes, str)] f(x: AnyStr) -> str @overload def g(x: AnyStr, *a: AnyStr) -> None: pass @@ -1752,14 +1755,11 @@ reveal_type(f(d)) # N: Revealed type is "builtins.list[builtins.int]" from typing import overload, Any @overload -def f(*, x: int = 3, y: int = 3) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def f(*, x: int = 3, y: int = 3) -> int: ... @overload def f(**kwargs: str) -> str: ... def f(*args, **kwargs): pass -# Checking an overload flagged as unsafe is a bit weird, but this is the -# cleanest way to make sure 'Any' ambiguity checks work correctly with -# keyword arguments. a: Any i: int reveal_type(f(x=a, y=i)) # N: Revealed type is "builtins.int" @@ -2163,8 +2163,9 @@ from wrapper import * [file wrapper.pyi] from typing import overload +# Safety of this highly depends on the implementation, so we lean towards being silent. @overload -def foo1(*x: int) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def foo1(*x: int) -> int: ... @overload def foo1(x: int, y: int, z: int) -> str: ... @@ -2173,8 +2174,9 @@ def foo2(*x: int) -> int: ... @overload def foo2(x: int, y: str, z: int) -> str: ... +# Note: this is technically unsafe, but we don't report this for now. @overload -def bar1(x: int, y: int, z: int) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def bar1(x: int, y: int, z: int) -> str: ... @overload def bar1(*x: int) -> int: ... @@ -2248,7 +2250,7 @@ from wrapper import * from typing import overload @overload -def foo1(x: str) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def foo1(x: str) -> str: ... @overload def foo1(x: str, y: str = ...) -> int: ... @@ -2268,12 +2270,12 @@ from wrapper import * from typing import overload @overload -def foo1(*args: int) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def foo1(*args: int) -> int: ... @overload def foo1(**kwargs: int) -> str: ... @overload -def foo2(**kwargs: int) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def foo2(**kwargs: int) -> str: ... @overload def foo2(*args: int) -> int: ... [builtins fixtures/dict.pyi] @@ -2314,13 +2316,14 @@ def foo2(x: int, *args: int) -> str: ... @overload def foo2(*args2: str) -> int: ... +# The two examples are unsafe, but this is hard to detect. @overload -def foo3(*args: int) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def foo3(*args: int) -> int: ... @overload def foo3(x: int, *args2: int) -> str: ... @overload -def foo4(x: int, *args: int) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def foo4(x: int, *args: int) -> str: ... @overload def foo4(*args2: int) -> int: ... [builtins fixtures/tuple.pyi] @@ -2357,13 +2360,13 @@ def foo4(x: Other = ..., *args: str) -> int: ... 
from typing import overload @overload -def foo1(x: int = 0, y: int = 0) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def foo1(x: int = 0, y: int = 0) -> int: ... @overload def foo1(*xs: int) -> str: ... def foo1(*args): pass @overload -def foo2(*xs: int) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def foo2(*xs: int) -> str: ... @overload def foo2(x: int = 0, y: int = 0) -> int: ... def foo2(*args): pass @@ -2412,12 +2415,12 @@ from wrapper import * from typing import overload @overload -def foo1(x: str, y: str = ..., z: str = ...) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def foo1(x: str, y: str = ..., z: str = ...) -> str: ... @overload def foo1(*x: str) -> int: ... @overload -def foo2(*x: str) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def foo2(*x: str) -> int: ... @overload def foo2(x: str, y: str = ..., z: str = ...) -> str: ... @@ -2433,12 +2436,12 @@ from wrapper import * from typing import overload @overload -def foo1(x: str, y: str = ..., z: int = ...) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def foo1(x: str, y: str = ..., z: int = ...) -> str: ... @overload def foo1(*x: str) -> int: ... @overload -def foo2(x: str, y: str = ..., z: int = ...) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def foo2(x: str, y: str = ..., z: int = ...) -> str: ... @overload def foo2(*x: str) -> int: ... [builtins fixtures/tuple.pyi] @@ -2449,7 +2452,7 @@ from wrapper import * from typing import overload @overload -def foo1(*, x: str, y: str, z: str) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def foo1(*, x: str, y: str, z: str) -> str: ... @overload def foo1(**x: str) -> int: ... @@ -2481,12 +2484,12 @@ def foo2(**x: str) -> int: ... def foo2(*, x: str, y: str, z: int) -> str: ... @overload -def foo3(*, x: str, y: str, z: int = ...) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def foo3(*, x: str, y: str, z: int = ...) -> str: ... @overload def foo3(**x: str) -> int: ... @overload -def foo4(**x: str) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def foo4(**x: str) -> int: ... @overload def foo4(*, x: str, y: str, z: int = ...) -> str: ... [builtins fixtures/dict.pyi] @@ -2497,12 +2500,13 @@ from wrapper import * from typing import overload @overload -def foo1(x: str, *, y: str, z: str) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def foo1(x: str, *, y: str, z: str) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types \ + # N: Flipping the order of overloads will fix this error @overload def foo1(**x: str) -> int: ... @overload -def foo2(**x: str) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def foo2(**x: str) -> int: ... @overload def foo2(x: str, *, y: str, z: str) -> str: ... @@ -2798,7 +2802,8 @@ def h(x: List[Union[C, D]]) -> str: ... def h(x): ... @overload -def i(x: List[Union[A, B]]) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def i(x: List[Union[A, B]]) -> int: ... 
# E: Overloaded function signatures 1 and 2 overlap with incompatible return types \ + # N: Flipping the order of overloads will fix this error @overload def i(x: List[Union[A, B, C]]) -> str: ... def i(x): ... @@ -2810,8 +2815,9 @@ from typing import TypeVar, overload T = TypeVar('T') +# Note: this is unsafe, but it is hard to detect. @overload -def f(x: int) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def f(x: int) -> str: ... @overload def f(x: T) -> T: ... def f(x): ... @@ -2827,14 +2833,15 @@ from typing import TypeVar, overload, List T = TypeVar('T') +# Note: first two examples are unsafe, but it is hard to detect. @overload -def f1(x: List[int]) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def f1(x: List[int]) -> str: ... @overload def f1(x: List[T]) -> T: ... def f1(x): ... @overload -def f2(x: List[int]) -> List[str]: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def f2(x: List[int]) -> List[str]: ... @overload def f2(x: List[T]) -> List[T]: ... def f2(x): ... @@ -2859,17 +2866,15 @@ from typing import TypeVar, overload, Generic T = TypeVar('T') class Wrapper(Generic[T]): + # Similar to above: this is unsafe, but it is hard to detect. @overload - def f(self, x: int) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types + def f(self, x: int) -> str: ... @overload def f(self, x: T) -> T: ... def f(self, x): ... - # TODO: This shouldn't trigger an error message? - # Related to testTypeCheckOverloadImplementationTypeVarDifferingUsage2? - # See https://github.com/python/mypy/issues/5510 @overload - def g(self, x: int) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types + def g(self, x: int) -> int: ... @overload def g(self, x: T) -> T: ... def g(self, x): ... @@ -2880,28 +2885,27 @@ from typing import TypeVar, overload, Generic, List T = TypeVar('T') class Wrapper(Generic[T]): + # Similar to above: first two examples are unsafe, but it is hard to detect. @overload - def f1(self, x: List[int]) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types + def f1(self, x: List[int]) -> str: ... @overload def f1(self, x: List[T]) -> T: ... def f1(self, x): ... @overload - def f2(self, x: List[int]) -> List[str]: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types + def f2(self, x: List[int]) -> List[str]: ... @overload def f2(self, x: List[T]) -> List[T]: ... def f2(self, x): ... - # TODO: This shouldn't trigger an error message? - # See https://github.com/python/mypy/issues/5510 @overload - def g1(self, x: List[int]) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types + def g1(self, x: List[int]) -> int: ... @overload def g1(self, x: List[T]) -> T: ... def g1(self, x): ... @overload - def g2(self, x: List[int]) -> List[int]: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types + def g2(self, x: List[int]) -> List[int]: ... @overload def g2(self, x: List[T]) -> List[T]: ... def g2(self, x): ... @@ -3078,13 +3082,14 @@ class C: pass S = TypeVar('S', A, B) @overload -def f(x: S) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def f(x: S) -> int: ... 
# E: Overloaded function signatures 1 and 2 overlap with incompatible return types \ + # N: Flipping the order of overloads will fix this error @overload def f(x: Union[B, C]) -> str: ... def f(x): pass @overload -def g(x: Union[B, C]) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def g(x: Union[B, C]) -> int: ... @overload def g(x: S) -> str: ... def g(x): pass @@ -3607,7 +3612,7 @@ def test(x: T) -> T: from typing import overload, Optional @overload -def f(x: None) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def f(x: None) -> int: ... @overload def f(x: object) -> str: ... def f(x): ... @@ -3632,7 +3637,7 @@ reveal_type(g(c)) # N: Revealed type is "builtins.str" from typing import overload, Optional @overload -def f(x: None) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def f(x: None) -> int: ... @overload def f(x: object) -> str: ... def f(x): ... @@ -3978,7 +3983,7 @@ from typing import overload, Any, Optional, Union class FakeAttribute: @overload - def dummy(self, instance: None, owner: Any) -> 'FakeAttribute': ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types + def dummy(self, instance: None, owner: Any) -> 'FakeAttribute': ... @overload def dummy(self, instance: object, owner: Any) -> int: ... def dummy(self, instance: Optional[object], owner: Any) -> Union['FakeAttribute', int]: ... @@ -4542,6 +4547,23 @@ reveal_type(Child().foo("...")) # N: Revealed type is "builtins.st reveal_type(Child().foo(x)) # N: Revealed type is "Union[__main__.Child, builtins.str]" reveal_type(Child().foo(3).child_only()) # N: Revealed type is "builtins.int" +[case testOverloadAndSelfTypesGenericNoOverlap] +from typing import Generic, TypeVar, Any, overload, Self, Union + +T = TypeVar("T") +class C(Generic[T]): + @overload + def get(self, obj: None) -> Self: ... + @overload + def get(self, obj: Any) -> T: ... + def get(self, obj: Union[Any, None]) -> Union[T, Self]: + return self + +class D(C[int]): ... +d: D +reveal_type(d.get(None)) # N: Revealed type is "__main__.D" +reveal_type(d.get("whatever")) # N: Revealed type is "builtins.int" + [case testOverloadAndClassTypes] from typing import overload, Union, TypeVar, Type @@ -4886,7 +4908,7 @@ T = TypeVar('T') def f() -> None: @overload - def g(x: str) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types + def g(x: str) -> int: ... @overload def g(x: T) -> T: ... def g(x): @@ -4927,7 +4949,7 @@ x: Any reveal_type(attr(x)) # N: Revealed type is "Any" attr("hi", 1) # E: No overload variant of "attr" matches argument types "str", "int" \ # N: Possible overload variants: \ - # N: def [T in (int, float)] attr(default: T = ..., blah: int = ...) -> T \ + # N: def [T: (int, float)] attr(default: T, blah: int = ...) -> T \ # N: def attr(default: Any = ...) -> int [file lib.pyi] from typing import overload, Any, TypeVar @@ -4935,7 +4957,7 @@ from typing import overload, Any, TypeVar T = TypeVar('T', int, float) @overload -def attr(default: T = ..., blah: int = ...) -> T: ... +def attr(default: T, blah: int = ...) -> T: ... @overload def attr(default: Any = ...) -> int: ... [out] @@ -4950,7 +4972,7 @@ x: Any reveal_type(attr(x)) # N: Revealed type is "Any" attr("hi", 1) # E: No overload variant of "attr" matches argument types "str", "int" \ # N: Possible overload variants: \ - # N: def [T <: int] attr(default: T = ..., blah: int = ...) 
-> T \ + # N: def [T: int] attr(default: T = ..., blah: int = ...) -> T \ # N: def attr(default: Any = ...) -> int [file lib.pyi] from typing import overload, TypeVar, Any @@ -4991,7 +5013,7 @@ children: List[Child] parents: List[Parent] @overload -def f(x: Child) -> List[Child]: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def f(x: Child) -> List[Child]: pass @overload def f(x: Parent) -> List[Parent]: pass def f(x: Union[Child, Parent]) -> Union[List[Child], List[Parent]]: @@ -5302,7 +5324,7 @@ def f1(g: G[A, B]) -> B: ... def f1(g: Any) -> Any: ... @overload -def f2(g: G[A, Any]) -> A: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def f2(g: G[A, Any]) -> A: ... @overload def f2(g: G[A, B], x: int = ...) -> B: ... def f2(g: Any, x: int = ...) -> Any: ... @@ -6483,7 +6505,7 @@ P = ParamSpec("P") R = TypeVar("R") @overload -def func(x: Callable[Concatenate[Any, P], R]) -> Callable[P, R]: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def func(x: Callable[Concatenate[Any, P], R]) -> Callable[P, R]: ... @overload def func(x: Callable[P, R]) -> Callable[Concatenate[str, P], R]: ... def func(x: Callable[..., R]) -> Callable[..., R]: ... @@ -6693,3 +6715,38 @@ class B: def f(self, *args, **kwargs): pass [builtins fixtures/tuple.pyi] + +[case testOverloadsSafeOverlapAllowed] +from lib import * +[file lib.pyi] +from typing import overload + +@overload +def bar(x: object) -> object: ... +@overload +def bar(x: int = ...) -> int: ... + +[case testOverloadsInvariantOverlapAllowed] +from lib import * +[file lib.pyi] +from typing import overload, List + +@overload +def bar(x: List[int]) -> List[int]: ... +@overload +def bar(x: List[object]) -> List[object]: ... + +[case testOverloadsNoneAnyOverlapAllowed] +from lib import * +[file lib.pyi] +from typing import overload, Any + +@overload +def foo(x: None) -> int: ... +@overload +def foo(x: object) -> str: ... + +@overload +def bar(x: int) -> int: ... +@overload +def bar(x: Any) -> str: ... diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index 1ea60b700cf1e..0a3feb36f405d 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -23,19 +23,19 @@ x: P # E: ParamSpec "P" is unbound def foo1(x: Callable[P, int]) -> Callable[P, str]: ... def foo2(x: P) -> P: ... # E: Invalid location for ParamSpec "P" \ - # N: You can use ParamSpec as the first argument to Callable, e.g., 'Callable[P, int]' + # N: You can use ParamSpec as the first argument to Callable, e.g., "Callable[P, int]" def foo3(x: Concatenate[int, P]) -> int: ... # E: Invalid location for Concatenate \ # N: You can use Concatenate as the first argument to Callable def foo4(x: List[P]) -> None: ... # E: Invalid location for ParamSpec "P" \ - # N: You can use ParamSpec as the first argument to Callable, e.g., 'Callable[P, int]' + # N: You can use ParamSpec as the first argument to Callable, e.g., "Callable[P, int]" def foo5(x: Callable[[int, str], P]) -> None: ... # E: Invalid location for ParamSpec "P" \ - # N: You can use ParamSpec as the first argument to Callable, e.g., 'Callable[P, int]' + # N: You can use ParamSpec as the first argument to Callable, e.g., "Callable[P, int]" def foo6(x: Callable[[P], int]) -> None: ... 
# E: Invalid location for ParamSpec "P" \ - # N: You can use ParamSpec as the first argument to Callable, e.g., 'Callable[P, int]' + # N: You can use ParamSpec as the first argument to Callable, e.g., "Callable[P, int]" [builtins fixtures/paramspec.pyi] [case testParamSpecImports] @@ -901,8 +901,8 @@ class A: def func(self, action: Callable[_P, _R], *args: _P.args, **kwargs: _P.kwargs) -> _R: ... -reveal_type(A.func) # N: Revealed type is "def [_P, _R] (self: __main__.A, action: def (*_P.args, **_P.kwargs) -> _R`4, *_P.args, **_P.kwargs) -> _R`4" -reveal_type(A().func) # N: Revealed type is "def [_P, _R] (action: def (*_P.args, **_P.kwargs) -> _R`8, *_P.args, **_P.kwargs) -> _R`8" +reveal_type(A.func) # N: Revealed type is "def [_P, _R] (self: __main__.A, action: def (*_P.args, **_P.kwargs) -> _R`6, *_P.args, **_P.kwargs) -> _R`6" +reveal_type(A().func) # N: Revealed type is "def [_P, _R] (action: def (*_P.args, **_P.kwargs) -> _R`10, *_P.args, **_P.kwargs) -> _R`10" def f(x: int) -> int: ... @@ -933,8 +933,8 @@ class A: def func(self, action: Job[_P, None]) -> Job[_P, None]: ... -reveal_type(A.func) # N: Revealed type is "def [_P] (self: __main__.A, action: __main__.Job[_P`3, None]) -> __main__.Job[_P`3, None]" -reveal_type(A().func) # N: Revealed type is "def [_P] (action: __main__.Job[_P`5, None]) -> __main__.Job[_P`5, None]" +reveal_type(A.func) # N: Revealed type is "def [_P] (self: __main__.A, action: __main__.Job[_P`4, None]) -> __main__.Job[_P`4, None]" +reveal_type(A().func) # N: Revealed type is "def [_P] (action: __main__.Job[_P`6, None]) -> __main__.Job[_P`6, None]" reveal_type(A().func(Job(lambda x: x))) # N: Revealed type is "__main__.Job[[x: Any], None]" def f(x: int, y: int) -> None: ... @@ -1096,7 +1096,7 @@ j = Job(generic_f) reveal_type(j) # N: Revealed type is "__main__.Job[[x: _T`-1]]" jf = j.into_callable() -reveal_type(jf) # N: Revealed type is "def [_T] (x: _T`3)" +reveal_type(jf) # N: Revealed type is "def [_T] (x: _T`4)" reveal_type(jf(1)) # N: Revealed type is "None" [builtins fixtures/paramspec.pyi] @@ -1520,7 +1520,7 @@ T = TypeVar("T") A = List[T] def f(x: A[[int, str]]) -> None: ... # E: Bracketed expression "[...]" is not valid as a type def g(x: A[P]) -> None: ... # E: Invalid location for ParamSpec "P" \ - # N: You can use ParamSpec as the first argument to Callable, e.g., 'Callable[P, int]' + # N: You can use ParamSpec as the first argument to Callable, e.g., "Callable[P, int]" C = Callable[P, T] x: C[int] # E: Bad number of arguments for type alias, expected 2, given 1 @@ -1600,7 +1600,7 @@ from typing_extensions import Concatenate, ParamSpec P = ParamSpec("P") @overload -def command() -> Callable[[Callable[Concatenate[object, object, P], object]], None]: # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def command() -> Callable[[Callable[Concatenate[object, object, P], object]], None]: ... 
@overload diff --git a/test-data/unit/check-protocols.test b/test-data/unit/check-protocols.test index e73add454a679..ee7556461fd33 100644 --- a/test-data/unit/check-protocols.test +++ b/test-data/unit/check-protocols.test @@ -2182,7 +2182,7 @@ main:11: note: Following member(s) of "B" have conflicts: main:11: note: Expected: main:11: note: def [T] f(self, x: T) -> None main:11: note: Got: -main:11: note: def [S <: int, T] f(self, x: S, y: T) -> None +main:11: note: def [S: int, T] f(self, x: S, y: T) -> None [case testProtocolIncompatibilityWithGenericRestricted] from typing import Protocol, TypeVar @@ -2202,7 +2202,7 @@ main:11: note: Following member(s) of "B" have conflicts: main:11: note: Expected: main:11: note: def [T] f(self, x: T) -> None main:11: note: Got: -main:11: note: def [S in (int, str), T] f(self, x: S, y: T) -> None +main:11: note: def [S: (int, str), T] f(self, x: S, y: T) -> None [case testProtocolIncompatibilityWithManyOverloads] from typing import Protocol, overload diff --git a/test-data/unit/check-python311.test b/test-data/unit/check-python311.test index 2d1a09ef33363..28951824999f8 100644 --- a/test-data/unit/check-python311.test +++ b/test-data/unit/check-python311.test @@ -171,5 +171,5 @@ reveal_type(x3) # N: Revealed type is "def (*Any) -> builtins.int" IntList = List[int] Alias4 = Callable[[*IntList], int] # E: "List[int]" cannot be unpacked (must be tuple or TypeVarTuple) x4: Alias4[int] # E: Bad number of arguments for type alias, expected 0, given 1 -reveal_type(x4) # N: Revealed type is "def (*Unpack[builtins.tuple[Any, ...]]) -> builtins.int" +reveal_type(x4) # N: Revealed type is "def (*Any) -> builtins.int" [builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-python312.test b/test-data/unit/check-python312.test index 8443aadb69059..27027d30a684a 100644 --- a/test-data/unit/check-python312.test +++ b/test-data/unit/check-python312.test @@ -41,7 +41,8 @@ reveal_type(g(1)) # E: Value of type "Coroutine[Any, Any, Any]" must be used \ [case test695TypeVar] from typing import Callable -type Alias1[T: int] = list[T] # E: PEP 695 type aliases are not yet supported +type Alias1[T: int] = list[T] # E: PEP 695 type aliases are not yet supported \ + # E: Name "T" is not defined type Alias2[**P] = Callable[P, int] # E: PEP 695 type aliases are not yet supported \ # E: Value of type "int" is not indexable \ # E: Name "P" is not defined @@ -52,7 +53,9 @@ class Cls1[T: int]: ... # E: PEP 695 generics are not yet supported class Cls2[**P]: ... # E: PEP 695 generics are not yet supported class Cls3[*Ts]: ... # E: PEP 695 generics are not yet supported -def func1[T: int](x: T) -> T: ... # E: PEP 695 generics are not yet supported +def func1[T: int](x: T) -> T: ... # E: PEP 695 generics are not yet supported \ + # E: Name "T" is not defined + def func2[**P](x: Callable[P, int]) -> Callable[P, str]: ... # E: PEP 695 generics are not yet supported \ # E: The first argument to Callable must be a list of types, parameter specification, or "..." 
\ # N: See https://mypy.readthedocs.io/en/stable/kinds_of_types.html#callable-types-and-lambdas \ @@ -504,6 +507,7 @@ reveal_type(a3) # N: Revealed type is "__main__.D[builtins.str, __main__.C[buil type A4 = int | str a4: A4 reveal_type(a4) # N: Revealed type is "Union[builtins.int, builtins.str]" +[builtins fixtures/type.pyi] [case testPEP695TypeAliasWithUnusedTypeParams] # flags: --enable-incomplete-feature=NewGenericSyntax @@ -531,6 +535,8 @@ a: A reveal_type(a) # N: Revealed type is "__main__.C" class C: pass +[builtins fixtures/tuple.pyi] +[typing fixtures/typing-full.pyi] [case testPEP695TypeAliasForwardReference3] # flags: --enable-incomplete-feature=NewGenericSyntax @@ -579,12 +585,15 @@ reveal_type(a) # N: Revealed type is "Any" [case testPEP695TypeAliasInvalidType] # flags: --enable-incomplete-feature=NewGenericSyntax -type A = int | 1 # E: Invalid type: try using Literal[1] instead? +type A = int | 1 # E: Invalid type: try using Literal[1] instead? \ + # E: Unsupported operand types for | ("Type[int]" and "int") + a: A reveal_type(a) # N: Revealed type is "Union[builtins.int, Any]" type B = int + str # E: Invalid type alias: expression is not a valid type b: B reveal_type(b) # N: Revealed type is "Any" +[builtins fixtures/type.pyi] [case testPEP695TypeAliasBoundForwardReference] # mypy: enable-incomplete-feature=NewGenericSyntax @@ -809,6 +818,7 @@ type C[**P] = Callable[P, int] f: C[[str, int | None]] reveal_type(f) # N: Revealed type is "def (builtins.str, Union[builtins.int, None]) -> builtins.int" [builtins fixtures/tuple.pyi] +[typing fixtures/typing-full.pyi] [case testPEP695TypeVarTuple] # flags: --enable-incomplete-feature=NewGenericSyntax @@ -1062,7 +1072,7 @@ from typing import cast def f[T]( x: T = T # E: Name "T" is not defined \ - # E: Incompatible default for argument "x" (default has type "object", argument has type "T") + # E: Incompatible default for argument "x" (default has type "TypeVar", argument has type "T") ) -> T: return x @@ -1072,6 +1082,8 @@ def g[T](x: T = cast(T, None)) -> T: # E: Name "T" is not defined class C: def m[T](self, x: T = cast(T, None)) -> T: # E: Name "T" is not defined return x +[builtins fixtures/tuple.pyi] +[typing fixtures/typing-full.pyi] [case testPEP695ListComprehension] # mypy: enable-incomplete-feature=NewGenericSyntax @@ -1174,6 +1186,7 @@ class C[T]: pass type B[T] = C[T] | list[B[T]] b: B[int] reveal_type(b) # N: Revealed type is "Union[__main__.C[builtins.int], builtins.list[...]]" +[builtins fixtures/type.pyi] [case testPEP695BadRecursiveTypeAlias] # mypy: enable-incomplete-feature=NewGenericSyntax @@ -1184,6 +1197,8 @@ a: A reveal_type(a) # N: Revealed type is "Any" b: B reveal_type(b) # N: Revealed type is "Any" +[builtins fixtures/type.pyi] +[typing fixtures/typing-full.pyi] [case testPEP695RecursiveTypeAliasForwardReference] # mypy: enable-incomplete-feature=NewGenericSyntax @@ -1272,6 +1287,7 @@ reveal_type(a) # N: Revealed type is "builtins.list[Any]" type B = tuple[*Ts] # E: All type parameters should be declared ("Ts" not declared) type C = Callable[P, None] # E: All type parameters should be declared ("P" not declared) [builtins fixtures/tuple.pyi] +[typing fixtures/typing-full.pyi] [case testPEP695NonGenericAliasToGenericClass] # mypy: enable-incomplete-feature=NewGenericSyntax @@ -1313,3 +1329,330 @@ reveal_type(E[int]().mm(b'x')) # N: Revealed type is "Tuple[__main__.E[builtins reveal_type(F[str]().m()) # N: Revealed type is "__main__.F[builtins.str]" reveal_type(F[str]().mm(b'x')) # N: Revealed type is 
"Tuple[__main__.F[builtins.str], builtins.bytes]" [builtins fixtures/tuple.pyi] + +[case testPEP695CallAlias] +# mypy: enable-incomplete-feature=NewGenericSyntax + +class C: + def __init__(self, x: str) -> None: ... +type A = C + +class D[T]: pass +type B[T] = D[T] + +reveal_type(A) # N: Revealed type is "typing.TypeAliasType" +reveal_type(B) # N: Revealed type is "typing.TypeAliasType" +reveal_type(B[int]) # N: Revealed type is "typing.TypeAliasType" + +A(1) # E: "TypeAliasType" not callable +B[int]() # E: "TypeAliasType" not callable + +A2 = C +B2 = D +A2(1) # E: Argument 1 to "C" has incompatible type "int"; expected "str" +B2[int]() +[builtins fixtures/tuple.pyi] +[typing fixtures/typing-full.pyi] + +[case testPEP695IncrementalTypeAliasKinds] +# flags: --enable-incomplete-feature=NewGenericSyntax +import a + +[file a.py] +from b import A + +[file a.py.2] +from b import A, B, C +A() +B() +C() + +[file b.py] +from typing_extensions import TypeAlias +type A = int +B = int +C: TypeAlias = int +[builtins fixtures/tuple.pyi] +[typing fixtures/typing-full.pyi] +[out2] +tmp/a.py:2: error: "TypeAliasType" not callable + +[case testPEP695TypeAliasBoundAndValueChecking] +# flags: --enable-incomplete-feature=NewGenericSyntax +from typing import Any, cast + +class C: pass +class D(C): pass + +type A[T: C] = list[T] +a1: A +reveal_type(a1) # N: Revealed type is "builtins.list[Any]" +a2: A[Any] +a3: A[C] +a4: A[D] +a5: A[object] # E: Type argument "object" of "A" must be a subtype of "C" +a6: A[int] # E: Type argument "int" of "A" must be a subtype of "C" + +x1 = cast(A[C], a1) +x2 = cast(A[None], a1) # E: Type argument "None" of "A" must be a subtype of "C" + +type A2[T: (int, C)] = list[T] +b1: A2 +reveal_type(b1) # N: Revealed type is "builtins.list[Any]" +b2: A2[Any] +b3: A2[int] +b4: A2[C] +b5: A2[D] # E: Value of type variable "T" of "A2" cannot be "D" +b6: A2[object] # E: Value of type variable "T" of "A2" cannot be "object" + +list[A2[int]]() +list[A2[None]]() # E: Invalid type argument value for "A2" + +class N(int): pass + +type A3[T: C, S: (int, str)] = T | S +c1: A3[C, int] +c2: A3[D, str] +c3: A3[C, N] # E: Value of type variable "S" of "A3" cannot be "N" +c4: A3[int, str] # E: Type argument "int" of "A3" must be a subtype of "C" +[builtins fixtures/type.pyi] +[typing fixtures/typing-full.pyi] + +[case testPEP695TypeAliasInClassBodyOrFunction] +# flags: --enable-incomplete-feature=NewGenericSyntax + +class C: + type A = int + type B[T] = list[T] | None + a: A + b: B[str] + + def method(self) -> None: + v: C.A + reveal_type(v) # N: Revealed type is "builtins.int" + +reveal_type(C.a) # N: Revealed type is "builtins.int" +reveal_type(C.b) # N: Revealed type is "Union[builtins.list[builtins.str], None]" + +C.A = str # E: Incompatible types in assignment (expression has type "Type[str]", variable has type "TypeAliasType") + +x: C.A +y: C.B[int] +reveal_type(x) # N: Revealed type is "builtins.int" +reveal_type(y) # N: Revealed type is "Union[builtins.list[builtins.int], None]" + +def f() -> None: + type A = int + type B[T] = list[T] | None + a: A + reveal_type(a) # N: Revealed type is "builtins.int" + + def g() -> None: + b: B[int] + reveal_type(b) # N: Revealed type is "Union[builtins.list[builtins.int], None]" + +class D: + def __init__(self) -> None: + type A = int + self.a: A = 0 + type B[T] = list[T] + self.b: B[int] = [1] + +reveal_type(D().a) # N: Revealed type is "builtins.int" +reveal_type(D().b) # N: Revealed type is "builtins.list[builtins.int]" + +class E[T]: + type X = list[T] # 
E: All type parameters should be declared ("T" not declared) + + def __init__(self) -> None: + type A = list[T] # E: All type parameters should be declared ("T" not declared) + self.a: A + +reveal_type(E[str]().a) # N: Revealed type is "builtins.list[Any]" +[builtins fixtures/type.pyi] +[typing fixtures/typing-full.pyi] + +[case testPEP695RedefineAsTypeAlias1] +# flags: --enable-incomplete-feature=NewGenericSyntax +class C: pass +type C = int # E: Name "C" already defined on line 2 + +A = 0 +type A = str # E: Name "A" already defined on line 5 +reveal_type(A) # N: Revealed type is "builtins.int" + +[case testPEP695RedefineAsTypeAlias2] +# flags: --enable-incomplete-feature=NewGenericSyntax +from m import D +type D = int # E: Name "D" already defined (possibly by an import) +a: D +reveal_type(a) # N: Revealed type is "m.D" +[file m.py] +class D: pass + +[case testPEP695RedefineAsTypeAlias3] +# flags: --enable-incomplete-feature=NewGenericSyntax +D = list["Forward"] +type D = int # E: Name "D" already defined on line 2 +Forward = str +x: D +reveal_type(x) # N: Revealed type is "builtins.list[builtins.str]" + +[case testPEP695MultiDefinitionsForTypeAlias] +# flags: --enable-incomplete-feature=NewGenericSyntax +if int(): + type A[T] = list[T] +else: + type A[T] = str # E: Name "A" already defined on line 3 +x: T # E: Name "T" is not defined +a: A[int] +reveal_type(a) # N: Revealed type is "builtins.list[builtins.int]" + +[case testPEP695UndefinedNameInAnnotation] +# flags: --enable-incomplete-feature=NewGenericSyntax +def f[T](x: foobar, y: T) -> T: ... # E: Name "foobar" is not defined +reveal_type(f) # N: Revealed type is "def [T] (x: Any, y: T`-1) -> T`-1" + +[case testPEP695WrongNumberOfConstrainedTypes] +# flags: --enable-incomplete-feature=NewGenericSyntax +type A[T: ()] = list[T] # E: Type variable must have at least two constrained types +a: A[int] +reveal_type(a) # N: Revealed type is "builtins.list[builtins.int]" + +type B[T: (int,)] = list[T] # E: Type variable must have at least two constrained types +b: B[str] +reveal_type(b) # N: Revealed type is "builtins.list[builtins.str]" + +[case testPEP695UsingTypeVariableInOwnBoundOrConstraint] +# flags: --enable-incomplete-feature=NewGenericSyntax +type A[T: list[T]] = str # E: Name "T" is not defined +type B[S: (list[S], str)] = str # E: Name "S" is not defined +type C[T, S: list[T]] = str # E: Name "T" is not defined + +def f[T: T](x: T) -> T: ... # E: Name "T" is not defined +class D[T: T]: # E: Name "T" is not defined + pass + +[case testPEP695InvalidType] +# flags: --enable-incomplete-feature=NewGenericSyntax +def f[T: 1](x: T) -> T: ... # E: Invalid type: try using Literal[1] instead? +class C[T: (int, (1 + 2))]: pass # E: Invalid type comment or annotation +type A = list[1] # E: Invalid type: try using Literal[1] instead? 
+type B = (1 + 2) # E: Invalid type alias: expression is not a valid type +a: A +reveal_type(a) # N: Revealed type is "builtins.list[Any]" +b: B +reveal_type(b) # N: Revealed type is "Any" + +[case testPEP695GenericNamedTuple] +# flags: --enable-incomplete-feature=NewGenericSyntax +from typing import NamedTuple + +# Invariant because of the signature of the generated _replace method +class N[T](NamedTuple): + x: T + y: int + +a: N[object] +reveal_type(a.x) # N: Revealed type is "builtins.object" +b: N[int] +reveal_type(b.x) # N: Revealed type is "builtins.int" +if int(): + a = b # E: Incompatible types in assignment (expression has type "N[int]", variable has type "N[object]") +if int(): + b = a # E: Incompatible types in assignment (expression has type "N[object]", variable has type "N[int]") + +class M[T: (int, str)](NamedTuple): + x: T + +c: M[int] +d: M[str] +e: M[bool] # E: Value of type variable "T" of "M" cannot be "bool" + +[builtins fixtures/tuple.pyi] + +[case testPEP695GenericTypedDict] +# flags: --enable-incomplete-feature=NewGenericSyntax +from typing import TypedDict + +class D[T](TypedDict): + x: T + y: int + +class E[T: str](TypedDict): + x: T + y: int + +a: D[object] +reveal_type(a["x"]) # N: Revealed type is "builtins.object" +b: D[int] +reveal_type(b["x"]) # N: Revealed type is "builtins.int" +c: E[str] +d: E[int] # E: Type argument "int" of "E" must be a subtype of "str" +[builtins fixtures/tuple.pyi] +[typing fixtures/typing-full.pyi] + +[case testCurrentClassWorksAsBound] +# flags: --enable-incomplete-feature=NewGenericSyntax +from typing import Protocol + +class Comparable[T: Comparable](Protocol): + def compare(self, other: T) -> bool: ... + +class Good: + def compare(self, other: Good) -> bool: ... + +x: Comparable[Good] +y: Comparable[int] # E: Type argument "int" of "Comparable" must be a subtype of "Comparable[Any]" + +[case testPEP695TypeAliasWithDifferentTargetTypes] +# flags: --enable-incomplete-feature=NewGenericSyntax +import types # We need GenericAlias from here, and test stubs don't bring in 'types' +from typing import Any, Callable, List, Literal, TypedDict + +# Test that various type expressions don't generate false positives as type alias +# values, as they are type checked as expressions. There is a similar test case in +# pythoneval.test that uses typeshed stubs. 
+ +class C[T]: pass + +class TD(TypedDict): + x: int + +type A1 = type[int] +type A2 = type[int] | None +type A3 = None | type[int] +type A4 = type[Any] + +type B1[**P, R] = Callable[P, R] | None +type B2[**P, R] = None | Callable[P, R] +type B3 = Callable[[str], int] +type B4 = Callable[..., int] + +type C1 = A1 | None +type C2 = None | A1 + +type D1 = Any | None +type D2 = None | Any + +type E1 = List[int] +type E2 = List[int] | None +type E3 = None | List[int] + +type F1 = Literal[1] +type F2 = Literal['x'] | None +type F3 = None | Literal[True] + +type G1 = tuple[int, Any] +type G2 = tuple[int, Any] | None +type G3 = None | tuple[int, Any] + +type H1 = TD +type H2 = TD | None +type H3 = None | TD + +type I1 = C[int] +type I2 = C[Any] | None +type I3 = None | C[TD] +[builtins fixtures/type.pyi] +[typing fixtures/typing-full.pyi] diff --git a/test-data/unit/check-python38.test b/test-data/unit/check-python38.test index 0f1cbb6e81c4a..dfb918defb0a5 100644 --- a/test-data/unit/check-python38.test +++ b/test-data/unit/check-python38.test @@ -297,8 +297,7 @@ def f(x: int = (c := 4)) -> int: z2: NT # E: Variable "NT" is not valid as a type \ # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases - if Alias := int: # E: Function "Alias" could always be true in boolean context \ - # E: Function "int" could always be true in boolean context + if Alias := int: # E: Function "Alias" could always be true in boolean context z3: Alias # E: Variable "Alias" is not valid as a type \ # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases diff --git a/test-data/unit/check-recursive-types.test b/test-data/unit/check-recursive-types.test index 84593933a2dea..d5c8acd1bc15d 100644 --- a/test-data/unit/check-recursive-types.test +++ b/test-data/unit/check-recursive-types.test @@ -440,7 +440,7 @@ from typing import NamedTuple, TypeVar, Tuple NT = NamedTuple("NT", [("x", NT), ("y", int)]) nt: NT reveal_type(nt) # N: Revealed type is "Tuple[..., builtins.int, fallback=__main__.NT]" -reveal_type(nt.x) # N: Revealed type is "Tuple[Tuple[..., builtins.int, fallback=__main__.NT], builtins.int, fallback=__main__.NT]" +reveal_type(nt.x) # N: Revealed type is "Tuple[..., builtins.int, fallback=__main__.NT]" reveal_type(nt[0]) # N: Revealed type is "Tuple[Tuple[..., builtins.int, fallback=__main__.NT], builtins.int, fallback=__main__.NT]" y: str if nt.x is not None: @@ -942,3 +942,55 @@ NotFilter = Tuple[Literal["not"], "NotFilter"] n: NotFilter reveal_type(n[1][1][0]) # N: Revealed type is "Literal['not']" [builtins fixtures/tuple.pyi] + +[case testNoCrashOnRecursiveAliasWithNone] +# flags: --strict-optional +from typing import Union, Generic, TypeVar, Optional + +T = TypeVar("T") +class A(Generic[T]): ... +class B(Generic[T]): ... + +Z = Union[A[Z], B[Optional[Z]]] +X = Union[A[Optional[X]], B[Optional[X]]] + +z: Z +x: X +reveal_type(z) # N: Revealed type is "Union[__main__.A[...], __main__.B[Union[..., None]]]" +reveal_type(x) # N: Revealed type is "Union[__main__.A[Union[..., None]], __main__.B[Union[..., None]]]" + +[case testRecursiveTupleFallback1] +from typing import NewType, Tuple, Union + +T1 = NewType("T1", str) +T2 = Tuple[T1, "T4", "T4"] +T3 = Tuple[str, "T4", "T4"] +T4 = Union[T2, T3] +[builtins fixtures/tuple.pyi] + +[case testRecursiveTupleFallback2] +from typing import NewType, Tuple, Union + +T1 = NewType("T1", str) +class T2(Tuple[T1, "T4", "T4"]): ... 
+T3 = Tuple[str, "T4", "T4"] +T4 = Union[T2, T3] +[builtins fixtures/tuple.pyi] + +[case testRecursiveTupleFallback3] +from typing import NewType, Tuple, Union + +T1 = NewType("T1", str) +T2 = Tuple[T1, "T4", "T4"] +class T3(Tuple[str, "T4", "T4"]): ... +T4 = Union[T2, T3] +[builtins fixtures/tuple.pyi] + +[case testRecursiveTupleFallback4] +from typing import NewType, Tuple, Union + +T1 = NewType("T1", str) +class T2(Tuple[T1, "T4", "T4"]): ... +class T3(Tuple[str, "T4", "T4"]): ... +T4 = Union[T2, T3] +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-redefine.test b/test-data/unit/check-redefine.test index e3f1b976d4e98..b7642d30efc8d 100644 --- a/test-data/unit/check-redefine.test +++ b/test-data/unit/check-redefine.test @@ -270,14 +270,17 @@ def f() -> None: from typing import TypeVar def f() -> None: x = TypeVar('x') - x = 1 # E: Invalid assignment target - reveal_type(x) # N: Revealed type is "builtins.int" + x = 1 # E: Invalid assignment target \ + # E: Incompatible types in assignment (expression has type "int", variable has type "TypeVar") + reveal_type(x) # N: Revealed type is "typing.TypeVar" y = 1 # NOTE: '"int" not callable' is due to test stubs y = TypeVar('y') # E: Cannot redefine "y" as a type variable \ - # E: "int" not callable + # E: Incompatible types in assignment (expression has type "TypeVar", variable has type "int") def h(a: y) -> y: return a # E: Variable "y" is not valid as a type \ # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases +[builtins fixtures/tuple.pyi] +[typing fixtures/typing-full.pyi] [case testCannotRedefineVarAsModule] # flags: --allow-redefinition diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test index e49a7a0e2e2f9..1480c83b22723 100644 --- a/test-data/unit/check-selftype.test +++ b/test-data/unit/check-selftype.test @@ -2071,3 +2071,64 @@ p: Partial reveal_type(p()) # N: Revealed type is "Never" p2: Partial2 reveal_type(p2(42)) # N: Revealed type is "builtins.int" + +[case testAccessingSelfClassVarInClassMethod] +from typing import Self, ClassVar, Type, TypeVar + +T = TypeVar("T", bound="Foo") + +class Foo: + instance: ClassVar[Self] + @classmethod + def get_instance(cls) -> Self: + return reveal_type(cls.instance) # N: Revealed type is "Self`0" + @classmethod + def get_instance_old(cls: Type[T]) -> T: + return reveal_type(cls.instance) # N: Revealed type is "T`-1" + +class Bar(Foo): + extra: int + + @classmethod + def get_instance(cls) -> Self: + reveal_type(cls.instance.extra) # N: Revealed type is "builtins.int" + return cls.instance + + @classmethod + def other(cls) -> None: + reveal_type(cls.instance) # N: Revealed type is "Self`0" + reveal_type(cls.instance.extra) # N: Revealed type is "builtins.int" + +reveal_type(Bar.instance) # N: Revealed type is "__main__.Bar" +[builtins fixtures/classmethod.pyi] + +[case testAccessingSelfClassVarInClassMethodTuple] +from typing import Self, ClassVar, Tuple + +class C(Tuple[int, str]): + x: Self + y: ClassVar[Self] + + @classmethod + def bar(cls) -> None: + reveal_type(cls.y) # N: Revealed type is "Self`0" + @classmethod + def bar_self(self) -> Self: + return reveal_type(self.y) # N: Revealed type is "Self`0" + +c: C +reveal_type(c.x) # N: Revealed type is "Tuple[builtins.int, builtins.str, fallback=__main__.C]" +reveal_type(c.y) # N: Revealed type is "Tuple[builtins.int, builtins.str, fallback=__main__.C]" +reveal_type(C.y) # N: Revealed type is "Tuple[builtins.int, builtins.str, fallback=__main__.C]" +C.x # E: 
Access to generic instance variables via class is ambiguous +[builtins fixtures/classmethod.pyi] + +[case testAccessingTypingSelfUnion] +from typing import Self, Union + +class C: + x: Self +class D: + x: int +x: Union[C, D] +reveal_type(x.x) # N: Revealed type is "Union[__main__.C, builtins.int]" diff --git a/test-data/unit/check-statements.test b/test-data/unit/check-statements.test index 71cc807197797..d1464423e90f0 100644 --- a/test-data/unit/check-statements.test +++ b/test-data/unit/check-statements.test @@ -1339,7 +1339,7 @@ from typing import Generator def g() -> Generator[int, None, None]: yield from () yield from (0, 1, 2) - yield from (0, "ERROR") # E: Incompatible types in "yield from" (actual type "object", expected type "int") + yield from (0, "ERROR") # E: Incompatible types in "yield from" (actual type "Union[int, str]", expected type "int") yield from ("ERROR",) # E: Incompatible types in "yield from" (actual type "str", expected type "int") [builtins fixtures/tuple.pyi] @@ -2307,3 +2307,38 @@ class Outer: class Inner: break # E: "break" outside loop [builtins fixtures/list.pyi] + +[case testCallableInstanceOverlapAllowed] +# flags: --warn-unreachable +from typing import Any, Callable, List + +class CAny: + def __call__(self) -> Any: ... +class CNone: + def __call__(self) -> None: ... +class CWrong: + def __call__(self, x: int) -> None: ... + +def describe(func: Callable[[], None]) -> str: + if isinstance(func, CAny): + return "CAny" + elif isinstance(func, CNone): + return "CNone" + elif isinstance(func, CWrong): + return "CWrong" # E: Statement is unreachable + else: + return "other" + +class C(CAny): + def __call__(self) -> None: ... + +def f(): + pass + +describe(CAny()) +describe(C()) +describe(CNone()) +describe(CWrong()) # E: Argument 1 to "describe" has incompatible type "CWrong"; expected "Callable[[], None]" \ + # N: "CWrong.__call__" has type "Callable[[Arg(int, 'x')], None]" +describe(f) +[builtins fixtures/isinstancelist.pyi] diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test index ad4893c2890ab..bf36977b56e36 100644 --- a/test-data/unit/check-tuples.test +++ b/test-data/unit/check-tuples.test @@ -1408,8 +1408,8 @@ y = "" reveal_type(t[x]) # N: Revealed type is "Union[builtins.int, builtins.str]" t[y] # E: No overload variant of "__getitem__" of "tuple" matches argument type "str" \ # N: Possible overload variants: \ - # N: def __getitem__(self, int, /) -> object \ - # N: def __getitem__(self, slice, /) -> Tuple[object, ...] + # N: def __getitem__(self, int, /) -> Union[int, str] \ + # N: def __getitem__(self, slice, /) -> Tuple[Union[int, str], ...] 
[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-type-aliases.test b/test-data/unit/check-type-aliases.test index f77c3c1c34e25..6f9e9eda1d024 100644 --- a/test-data/unit/check-type-aliases.test +++ b/test-data/unit/check-type-aliases.test @@ -93,11 +93,9 @@ T = TypeVar('T') A = Tuple[T, T] if int(): - A = Union[T, int] # E: Cannot assign multiple types to name "A" without an explicit "Type[...]" annotation \ - # E: Value of type "int" is not indexable - # the second error is because of `Union = 0` in lib-stub/typing.pyi + A = Union[T, int] # E: Cannot assign multiple types to name "A" without an explicit "Type[...]" annotation [builtins fixtures/tuple.pyi] -[out] +[typing fixtures/typing-full.pyi] [case testProhibitUsingVariablesAsTypesAndAllowAliasesAsTypes] @@ -967,6 +965,7 @@ a: A b: B reveal_type(a) # N: Revealed type is "Union[builtins.list[Any], builtins.int]" reveal_type(b) # N: Revealed type is "Union[builtins.int, builtins.list[Any]]" +[builtins fixtures/type.pyi] [case testValidTypeAliasValues] from typing import TypeVar, Generic, List @@ -1075,11 +1074,15 @@ x: TestType = 42 y: TestType = 'a' z: TestType = object() # E: Incompatible types in assignment (expression has type "object", variable has type "Union[int, str]") +reveal_type(TestType) # N: Revealed type is "typing.TypeAliasType" +TestType() # E: "TypeAliasType" not callable + class A: ClassAlias = TypeAliasType("ClassAlias", int) xc: A.ClassAlias = 1 yc: A.ClassAlias = "" # E: Incompatible types in assignment (expression has type "str", variable has type "int") [builtins fixtures/tuple.pyi] +[typing fixtures/typing-full.pyi] [case testTypeAliasTypeInvalid] from typing_extensions import TypeAliasType @@ -1094,6 +1097,7 @@ T3 = TypeAliasType("T3", -1) # E: Invalid type: try using Literal[-1] instead? t3: T3 reveal_type(t3) # N: Revealed type is "Any" [builtins fixtures/tuple.pyi] +[typing fixtures/typing-full.pyi] [case testTypeAliasTypeGeneric] from typing import Callable, Dict, Generic, TypeVar, Tuple @@ -1140,6 +1144,7 @@ ParamAlias2 = TypeAliasType("ParamAlias2", G[P, T], type_params=(P, T)) xp: ParamAlias2[[int], str] reveal_type(xp) # N: Revealed type is "__main__.G[[builtins.int], builtins.str]" [builtins fixtures/dict.pyi] +[typing fixtures/typing-full.pyi] [case testTypeAliasTypeInvalidGeneric] from typing_extensions import TypeAliasType, TypeVarTuple, ParamSpec @@ -1195,11 +1200,11 @@ unbound_tvt_alias2: Ta10[int] # E: Bad number of arguments for type alias, expe reveal_type(unbound_tvt_alias2) # N: Revealed type is "def (*Any) -> builtins.str" class A(Generic[T]): - Ta11 = TypeAliasType("Ta11", Dict[str, T], type_params=(T,)) # E: Can't use bound type variable "T" to define generic alias \ - # E: "T" is a type variable and only valid in type context + Ta11 = TypeAliasType("Ta11", Dict[str, T], type_params=(T,)) # E: Can't use bound type variable "T" to define generic alias x: A.Ta11 = {"a": 1} reveal_type(x) # N: Revealed type is "builtins.dict[builtins.str, Any]" [builtins fixtures/dict.pyi] +[typing fixtures/typing-full.pyi] [case testTypeAliasTypeNoUnpackInTypeParams311] # flags: --python-version 3.11 @@ -1213,3 +1218,14 @@ Ta2 = TypeAliasType("Ta2", None, type_params=(Unpack[Ts],)) # E: Free type vari # N: Don't Unpack type variables in type_params [builtins fixtures/tuple.pyi] + +[case testAliasInstanceNameClash] +from lib import func +class A: ... 
+func(A()) # E: Argument 1 to "func" has incompatible type "__main__.A"; expected "lib.A" +[file lib.py] +from typing import List, Union + +A = Union[int, List[A]] +def func(x: A) -> int: ... +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-type-object-type-inference.test b/test-data/unit/check-type-object-type-inference.test index baeca1e22ac7c..5a4afa0c92481 100644 --- a/test-data/unit/check-type-object-type-inference.test +++ b/test-data/unit/check-type-object-type-inference.test @@ -2,6 +2,7 @@ # flags: --python-version 3.9 from typing import TypeVar, Generic, Type from abc import abstractmethod +import types # Explicitly bring in stubs for 'types' T = TypeVar('T') class E(Generic[T]): @@ -37,5 +38,5 @@ def i(f: F): f.f(tuple[int,tuple[int,str]]).e( (27,(28,'z')) ) # OK reveal_type(f.f(tuple[int,tuple[int,str]]).e) # N: Revealed type is "def (t: Tuple[builtins.int, Tuple[builtins.int, builtins.str]]) -> builtins.str" -x = tuple[int,str][str] # E: The type "Type[Tuple[Any, ...]]" is not generic and not indexable +x = tuple[int,str][str] # False negative [builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test index 09b86e4afd2dd..fa77d98e4a34a 100644 --- a/test-data/unit/check-typeddict.test +++ b/test-data/unit/check-typeddict.test @@ -3487,3 +3487,66 @@ class A(Generic[T]): return self.a(x=1) [typing fixtures/typing-full.pyi] [builtins fixtures/tuple.pyi] + +[case testNameUndefinedErrorDoesNotLoseUnpackedKWArgsInformation] +from typing import overload +from typing_extensions import TypedDict, Unpack + +class TD(TypedDict, total=False): + x: int + y: str + +@overload +def f(self, *, x: int) -> None: ... +@overload +def f(self, *, y: str) -> None: ... +def f(self, **kwargs: Unpack[TD]) -> None: + z # E: Name "z" is not defined + +@overload +def g(self, *, x: float) -> None: ... +@overload +def g(self, *, y: str) -> None: ... +def g(self, **kwargs: Unpack[TD]) -> None: # E: Overloaded function implementation does not accept all possible arguments of signature 1 + z # E: Name "z" is not defined + +class A: + def f(self, *, x: int) -> None: ... + def g(self, *, x: float) -> None: ... +class B(A): + def f(self, **kwargs: Unpack[TD]) -> None: + z # E: Name "z" is not defined + def g(self, **kwargs: Unpack[TD]) -> None: # E: Signature of "g" incompatible with supertype "A" \ + # N: Superclass: \ + # N: def g(self, *, x: float) -> None \ + # N: Subclass: \ + # N: def g(*, x: int = ..., y: str = ...) -> None + z # E: Name "z" is not defined +reveal_type(B.f) # N: Revealed type is "def (self: __main__.B, **kwargs: Unpack[TypedDict('__main__.TD', {'x'?: builtins.int, 'y'?: builtins.str})])" +B().f(x=1.0) # E: Argument "x" to "f" of "B" has incompatible type "float"; expected "int" +[builtins fixtures/primitives.pyi] + +[case testTypedDictUnpackWithParamSpecInference] +from typing import TypeVar, ParamSpec, Callable +from typing_extensions import TypedDict, Unpack + +P = ParamSpec("P") +R = TypeVar("R") + +def run(func: Callable[P, R], *args: P.args, **kwargs: P.kwargs) -> R: ... + +class Params(TypedDict): + temperature: float + +def test(temperature: int) -> None: ... +def test2(temperature: float, other: str) -> None: ... 
+ +class Test: + def f(self, c: Callable[..., None], **params: Unpack[Params]) -> None: + run(c, **params) + def g(self, **params: Unpack[Params]) -> None: + run(test, **params) # E: Argument "temperature" to "run" has incompatible type "float"; expected "int" + def h(self, **params: Unpack[Params]) -> None: + run(test2, other="yes", **params) + run(test2, other=0, **params) # E: Argument "other" to "run" has incompatible type "int"; expected "str" +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index f704e3c5c7136..ea692244597c2 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -24,7 +24,7 @@ def g(a: Tuple[Unpack[Ts]], b: Tuple[Unpack[Ts]]) -> Tuple[Unpack[Ts]]: reveal_type(g(args, args)) # N: Revealed type is "Tuple[builtins.int, builtins.str]" reveal_type(g(args, args2)) # N: Revealed type is "Tuple[builtins.int, builtins.str]" -reveal_type(g(args, args3)) # N: Revealed type is "builtins.tuple[builtins.object, ...]" +reveal_type(g(args, args3)) # N: Revealed type is "builtins.tuple[Union[builtins.int, builtins.str], ...]" reveal_type(g(any, any)) # N: Revealed type is "builtins.tuple[Any, ...]" [builtins fixtures/tuple.pyi] @@ -881,7 +881,8 @@ y: B z: C reveal_type(x) # N: Revealed type is "Any" reveal_type(y) # N: Revealed type is "Any" -reveal_type(z) # N: Revealed type is "Tuple[builtins.int, Unpack[Any]]" +reveal_type(z) # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[Any, ...]]]" + [builtins fixtures/tuple.pyi] [case testInferenceAgainstGenericVariadicWithBadType] @@ -988,7 +989,7 @@ from typing_extensions import Unpack def pipeline(*xs: Unpack[Tuple[int, Unpack[Tuple[float, ...]], bool]]) -> None: for x in xs: - reveal_type(x) # N: Revealed type is "builtins.float" + reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.float]" [builtins fixtures/tuple.pyi] [case testFixedUnpackItemInInstanceArguments] @@ -1099,9 +1100,10 @@ reveal_type(t.fn) # N: Revealed type is "def (builtins.int, builtins.int, built [builtins fixtures/tuple.pyi] [case testVariadicNamedTuple] -from typing import Tuple, Callable, NamedTuple, Generic +from typing import Tuple, Callable, NamedTuple, Generic, TypeVar from typing_extensions import TypeVarTuple, Unpack +T = TypeVar("T") Ts = TypeVarTuple("Ts") class A(NamedTuple, Generic[Unpack[Ts], T]): fn: Callable[[Unpack[Ts]], None] @@ -1128,9 +1130,10 @@ nt2 = A(fn=bad, val=42) # E: Argument "fn" to "A" has incompatible type "Callab [builtins fixtures/tuple.pyi] [case testVariadicTypedDict] -from typing import Tuple, Callable, Generic +from typing import Tuple, Callable, Generic, TypeVar from typing_extensions import TypeVarTuple, Unpack, TypedDict +T = TypeVar("T") Ts = TypeVarTuple("Ts") class A(TypedDict, Generic[Unpack[Ts], T]): fn: Callable[[Unpack[Ts]], None] @@ -1712,7 +1715,7 @@ vt: Tuple[int, Unpack[Tuple[float, ...]], int] reveal_type(vt + (1, 2)) # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.float, ...]], builtins.int, Literal[1]?, Literal[2]?]" reveal_type((1, 2) + vt) # N: Revealed type is "Tuple[Literal[1]?, Literal[2]?, builtins.int, Unpack[builtins.tuple[builtins.float, ...]], builtins.int]" -reveal_type(vt + vt) # N: Revealed type is "builtins.tuple[builtins.float, ...]" +reveal_type(vt + vt) # N: Revealed type is "builtins.tuple[Union[builtins.int, builtins.float], ...]" reveal_type(vtf + (1, 2)) # N: Revealed type is 
"Tuple[Unpack[builtins.tuple[builtins.float, ...]], Literal[1]?, Literal[2]?]" reveal_type((1, 2) + vtf) # N: Revealed type is "Tuple[Literal[1]?, Literal[2]?, Unpack[builtins.tuple[builtins.float, ...]]]" @@ -1807,6 +1810,63 @@ def test(a: Tuple[int, str], b: Tuple[bool], c: Tuple[bool, ...]): reveal_type(add(b, c)) # N: Revealed type is "builtins.tuple[builtins.bool, ...]" [builtins fixtures/tuple.pyi] +[case testTypeVarTupleOverloadOverlap] +from typing import Union, overload, Tuple +from typing_extensions import Unpack + +class Int(int): ... + +A = Tuple[int, Unpack[Tuple[int, ...]]] +B = Tuple[int, Unpack[Tuple[str, ...]]] + +@overload +def f(arg: A) -> int: ... +@overload +def f(arg: B) -> str: ... +def f(arg: Union[A, B]) -> Union[int, str]: + ... + +A1 = Tuple[int, Unpack[Tuple[Int, ...]]] +B1 = Tuple[Unpack[Tuple[Int, ...]], int] + +@overload +def f1(arg: A1) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +@overload +def f1(arg: B1) -> str: ... +def f1(arg: Union[A1, B1]) -> Union[int, str]: + ... + +A2 = Tuple[int, int, int] +B2 = Tuple[int, Unpack[Tuple[int, ...]]] + +@overload +def f2(arg: A2) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +@overload +def f2(arg: B2) -> str: ... +def f2(arg: Union[A2, B2]) -> Union[int, str]: + ... + +A3 = Tuple[int, int, int] +B3 = Tuple[int, Unpack[Tuple[str, ...]]] + +@overload +def f3(arg: A3) -> int: ... +@overload +def f3(arg: B3) -> str: ... +def f3(arg: Union[A3, B3]) -> Union[int, str]: + ... + +A4 = Tuple[int, int, Unpack[Tuple[int, ...]]] +B4 = Tuple[int] + +@overload +def f4(arg: A4) -> int: ... +@overload +def f4(arg: B4) -> str: ... +def f4(arg: Union[A4, B4]) -> Union[int, str]: + ... +[builtins fixtures/tuple.pyi] + [case testTypeVarTupleIndexOldStyleNonNormalizedAndNonLiteral] from typing import Any, Tuple from typing_extensions import Unpack @@ -2320,3 +2380,83 @@ def a2(x: Array[int, str]) -> None: reveal_type(func(x, 2, "Hello", True)) # E: Cannot infer type argument 1 of "func" \ # N: Revealed type is "builtins.tuple[Any, ...]" [builtins fixtures/tuple.pyi] + +[case testTypeVarTupleTypeApplicationOverload] +from typing import Generic, TypeVar, TypeVarTuple, Unpack, overload, Callable + +T = TypeVar("T") +T1 = TypeVar("T1") +T2 = TypeVar("T2") +T3 = TypeVar("T3") +Ts = TypeVarTuple("Ts") + +class C(Generic[T, Unpack[Ts]]): + @overload + def __init__(self, f: Callable[[Unpack[Ts]], T]) -> None: ... + @overload + def __init__(self, f: Callable[[T1, T2, T3, Unpack[Ts]], T], a: T1, b: T2, c: T3) -> None: ... + def __init__(self, f, *args, **kwargs) -> None: + ... + +reveal_type(C[int, str]) # N: Revealed type is "Overload(def (f: def (builtins.str) -> builtins.int) -> __main__.C[builtins.int, builtins.str], def [T1, T2, T3] (f: def (T1`-1, T2`-2, T3`-3, builtins.str) -> builtins.int, a: T1`-1, b: T2`-2, c: T3`-3) -> __main__.C[builtins.int, builtins.str])" +Alias = C[int, str] + +def f(x: int, y: int, z: int, t: int) -> str: ... 
+x = C(f, 0, 0, "hm") # E: Argument 1 to "C" has incompatible type "Callable[[int, int, int, int], str]"; expected "Callable[[int, int, str, int], str]" +reveal_type(x) # N: Revealed type is "__main__.C[builtins.str, builtins.int]" +reveal_type(C(f)) # N: Revealed type is "__main__.C[builtins.str, builtins.int, builtins.int, builtins.int, builtins.int]" +C[()] # E: At least 1 type argument(s) expected, none given +[builtins fixtures/tuple.pyi] + +[case testTypeVarTupleAgainstParamSpecActualSuccess] +from typing import Generic, TypeVar, TypeVarTuple, Unpack, Callable, Tuple, List +from typing_extensions import ParamSpec + +R = TypeVar("R") +P = ParamSpec("P") + +class CM(Generic[R]): ... +def cm(fn: Callable[P, R]) -> Callable[P, CM[R]]: ... + +Ts = TypeVarTuple("Ts") +@cm +def test(*args: Unpack[Ts]) -> Tuple[Unpack[Ts]]: ... + +reveal_type(test) # N: Revealed type is "def [Ts] (*args: Unpack[Ts`-1]) -> __main__.CM[Tuple[Unpack[Ts`-1]]]" +reveal_type(test(1, 2, 3)) # N: Revealed type is "__main__.CM[Tuple[Literal[1]?, Literal[2]?, Literal[3]?]]" +[builtins fixtures/tuple.pyi] + +[case testTypeVarTupleAgainstParamSpecActualFailedNoCrash] +from typing import Generic, TypeVar, TypeVarTuple, Unpack, Callable, Tuple, List +from typing_extensions import ParamSpec + +R = TypeVar("R") +P = ParamSpec("P") + +class CM(Generic[R]): ... +def cm(fn: Callable[P, List[R]]) -> Callable[P, CM[R]]: ... + +Ts = TypeVarTuple("Ts") +@cm # E: Argument 1 to "cm" has incompatible type "Callable[[VarArg(Unpack[Ts])], Tuple[Unpack[Ts]]]"; expected "Callable[[VarArg(Never)], List[Never]]" +def test(*args: Unpack[Ts]) -> Tuple[Unpack[Ts]]: ... + +reveal_type(test) # N: Revealed type is "def (*args: Never) -> __main__.CM[Never]" +[builtins fixtures/tuple.pyi] + +[case testTypeVarTupleAgainstParamSpecActualPrefix] +from typing import Generic, TypeVar, TypeVarTuple, Unpack, Callable, Tuple, List +from typing_extensions import ParamSpec, Concatenate + +R = TypeVar("R") +P = ParamSpec("P") +T = TypeVar("T") + +class CM(Generic[R]): ... +def cm(fn: Callable[Concatenate[T, P], R]) -> Callable[Concatenate[List[T], P], CM[R]]: ... + +Ts = TypeVarTuple("Ts") +@cm +def test(x: T, *args: Unpack[Ts]) -> Tuple[T, Unpack[Ts]]: ... + +reveal_type(test) # N: Revealed type is "def [T, Ts] (builtins.list[T`2], *args: Unpack[Ts`-2]) -> __main__.CM[Tuple[T`2, Unpack[Ts`-2]]]" +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-union-or-syntax.test b/test-data/unit/check-union-or-syntax.test index 85e268f348f01..fcf679fff4014 100644 --- a/test-data/unit/check-union-or-syntax.test +++ b/test-data/unit/check-union-or-syntax.test @@ -189,7 +189,7 @@ def g(x: int | str | tuple[int, str] | C) -> None: # flags: --python-version 3.9 from typing import Union def f(x: Union[int, str, None]) -> None: - if isinstance(x, int | str): # E: Unsupported left operand type for | ("Type[int]") + if isinstance(x, int | str): reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.str]" else: reveal_type(x) # N: Revealed type is "None" @@ -207,6 +207,36 @@ foo: ReadableBuffer [file was_mmap.pyi] from was_builtins import * class mmap: ... 
+[builtins fixtures/type.pyi] + +[case testTypeAliasWithNewUnionIsInstance] +# flags: --python-version 3.10 +SimpleAlias = int | str + +def foo(x: int | str | tuple): + if isinstance(x, SimpleAlias): + reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.str]" + else: + reveal_type(x) # N: Revealed type is "builtins.tuple[Any, ...]" + +ParameterizedAlias = str | list[str] + +# these are false negatives: +isinstance(5, str | list[str]) +isinstance(5, ParameterizedAlias) +[builtins fixtures/type.pyi] + +[case testIsInstanceUnionNone] +# flags: --python-version 3.10 +def foo(value: str | bool | None): + assert not isinstance(value, str | None) + reveal_type(value) # N: Revealed type is "builtins.bool" + +def bar(value: object): + assert isinstance(value, str | None) + reveal_type(value) # N: Revealed type is "Union[builtins.str, None]" +[builtins fixtures/type.pyi] + # TODO: Get this test to pass [case testImplicit604TypeAliasWithCyclicImportNotInStub-xfail] diff --git a/test-data/unit/check-unions.test b/test-data/unit/check-unions.test index 2ca2f1ba9eb3f..329896f7a1a7b 100644 --- a/test-data/unit/check-unions.test +++ b/test-data/unit/check-unions.test @@ -1289,3 +1289,60 @@ x: str = a_class_or_none.field a_or_none: Optional[A] y: int = a_or_none.field [builtins fixtures/list.pyi] + +[case testLargeUnionsShort] +from typing import Union + +class C1: ... +class C2: ... +class C3: ... +class C4: ... +class C5: ... +class C6: ... +class C7: ... +class C8: ... +class C9: ... +class C10: ... +class C11: ... + +u: Union[C1, C2, C3, C4, C5, C6, C7, C8, C9, C10, C11] +x: int = u # E: Incompatible types in assignment (expression has type "Union[C1, C2, C3, C4, C5, <6 more items>]", variable has type "int") + +[case testLargeUnionsLongIfNeeded] +from typing import Union + +class C1: ... +class C2: ... +class C3: ... +class C4: ... +class C5: ... +class C6: ... +class C7: ... +class C8: ... +class C9: ... +class C10: ... + +x: Union[C1, C2, C3, C4, C5, C6, C7, C8, C9, C10, int] +y: Union[C1, C2, C3, C4, C5, C6, C7, C8, C9, C10, str] +x = y # E: Incompatible types in assignment (expression has type "Union[C1, C2, C3, C4, C5, C6, C7, C8, C9, C10, str]", variable has type "Union[C1, C2, C3, C4, C5, C6, C7, C8, C9, C10, int]") \ + # N: Item in the first union not in the second: "str" + +[case testLargeUnionsNoneShown] +from typing import Union + +class C1: ... +class C2: ... +class C3: ... +class C4: ... +class C5: ... +class C6: ... +class C7: ... +class C8: ... +class C9: ... +class C10: ... +class C11: ... 
+ +x: Union[C1, C2, C3, C4, C5, C6, C7, C8, C9, C10, C11] +y: Union[C1, C2, C3, C4, C5, C6, C7, C8, C9, C10, C11, None] +x = y # E: Incompatible types in assignment (expression has type "Union[C1, C2, C3, C4, C5, <6 more items>, None]", variable has type "Union[C1, C2, C3, C4, C5, <6 more items>]") \ + # N: Item in the first union not in the second: "None" diff --git a/test-data/unit/deps.test b/test-data/unit/deps.test index d18b4aae963bf..3364dee6c6964 100644 --- a/test-data/unit/deps.test +++ b/test-data/unit/deps.test @@ -1431,3 +1431,25 @@ class B(A): -> m -> m -> m + +[case testPEP695TypeAliasDeps] +# flags: --enable-incomplete-feature=NewGenericSyntax --python-version=3.12 +from a import C, E +type A = C +type A2 = A +type A3 = E +[file a.py] +class C: pass +class D: pass +type E = D +[out] + -> m + -> m + -> m + -> m + -> m + -> m + -> m + -> m +[builtins fixtures/tuple.pyi] +[typing fixtures/typing-full.pyi] diff --git a/test-data/unit/diff.test b/test-data/unit/diff.test index 8fc74868123e8..9212d902e8b21 100644 --- a/test-data/unit/diff.test +++ b/test-data/unit/diff.test @@ -1530,3 +1530,102 @@ class C: [out] __main__.C.get_by_team_and_id __main__.Optional + +[case testPEP695TypeAlias] +# flags: --enable-incomplete-feature=NewGenericSyntax --python-version=3.12 +from typing_extensions import TypeAlias, TypeAliasType +type A = int +type B = str +type C = int +D = int +E: TypeAlias = int +F = TypeAliasType("F", int) +G = TypeAliasType("G", int) +type H = int + +[file next.py] +# flags: --enable-incomplete-feature=NewGenericSyntax --python-version=3.12 +from typing_extensions import TypeAlias, TypeAliasType +type A = str +type B = str +type C[T] = int +type D = int +type E = int +type F = int +type G = str +type H[T] = int + +[builtins fixtures/tuple.pyi] +[typing fixtures/typing-full.pyi] +[out] +__main__.A +__main__.C +__main__.D +__main__.E +__main__.G +__main__.H + +[case testPEP695TypeAlias2] +# flags: --enable-incomplete-feature=NewGenericSyntax --python-version=3.12 +type A[T: int] = list[T] +type B[T: int] = list[T] +type C[T: (int, str)] = list[T] +type D[T: (int, str)] = list[T] +type E[T: int] = list[T] +type F[T: (int, str)] = list[T] + +[file next.py] +# flags: --enable-incomplete-feature=NewGenericSyntax --python-version=3.12 +type A[T] = list[T] +type B[T: str] = list[T] +type C[T: (int, None)] = list[T] +type D[T] = list[T] +type E[T: int] = list[T] +type F[T: (int, str)] = list[T] + +[out] +__main__.A +__main__.B +__main__.C +__main__.D + +[case testPEP695GenericFunction] +# flags: --enable-incomplete-feature=NewGenericSyntax --python-version=3.12 +def f[T](x: T) -> T: + return x +def g[T](x: T, y: T) -> T: + return x +[file next.py] +# flags: --enable-incomplete-feature=NewGenericSyntax --python-version=3.12 +def f[T](x: T) -> T: + return x +def g[T, S](x: T, y: S) -> S: + return y +[out] +__main__.g + +[case testPEP695GenericClass] +# flags: --enable-incomplete-feature=NewGenericSyntax --python-version=3.12 +class C[T]: + pass +class D[T]: + pass +class E[T]: + pass +class F[T]: + def f(self, x: object) -> T: ... +[file next.py] +# flags: --enable-incomplete-feature=NewGenericSyntax --python-version=3.12 +class C[T]: + pass +class D[T: int]: + pass +class E: + pass +class F[T]: + def f(self, x: T) -> T: ... 
+[out] +__main__.D +__main__.E +__main__.F +__main__.F.f diff --git a/test-data/unit/fine-grained-python312.test b/test-data/unit/fine-grained-python312.test new file mode 100644 index 0000000000000..3970c8cacfbf7 --- /dev/null +++ b/test-data/unit/fine-grained-python312.test @@ -0,0 +1,82 @@ +[case testPEP695TypeAliasDep] +# flags: --enable-incomplete-feature=NewGenericSyntax +import m +def g() -> m.C: + return m.f() +[file m.py] +type C = int + +def f() -> int: + pass +[file m.py.2] +type C = str + +def f() -> int: + pass +[out] +== +main:4: error: Incompatible return value type (got "int", expected "str") + +[case testPEP695ChangeOldStyleToNewStyleTypeAlias] +# flags: --enable-incomplete-feature=NewGenericSyntax +from m import A +A() + +[file m.py] +A = int + +[file m.py.2] +type A = int +[typing fixtures/typing-full.pyi] +[builtins fixtures/tuple.pyi] +[out] +== +main:3: error: "TypeAliasType" not callable + +[case testPEP695VarianceChangesDueToDependency] +# flags: --enable-incomplete-feature=NewGenericSyntax +from a import C + +x: C[object] = C[int]() + +[file a.py] +from b import A + +class C[T]: + def f(self) -> A[T]: ... + +[file b.py] +class A[T]: + def f(self) -> T: ... + +[file b.py.2] +class A[T]: + def f(self) -> list[T]: ... + +[out] +== +main:4: error: Incompatible types in assignment (expression has type "C[int]", variable has type "C[object]") + +[case testPEP695TypeAliasChangesDueToDependency] +# flags: --enable-incomplete-feature=NewGenericSyntax +from a import A +x: A +x = 0 +x = '' + +[file a.py] +from b import B +type A = B[int, str] + +[file b.py] +from typing import Union as B + +[file b.py.2] +from builtins import tuple as B + +[builtins fixtures/tuple.pyi] +[typing fixtures/typing-full.pyi] +[out] +== +main:4: error: Incompatible types in assignment (expression has type "int", variable has type "tuple[int, str]") +main:5: error: Incompatible types in assignment (expression has type "str", variable has type "tuple[int, str]") diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 9c379d8f60da5..2a652e50b1e6d 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -7138,7 +7138,7 @@ T = TypeVar('T', bound=str) a.py:2: error: No overload variant of "f" matches argument type "int" a.py:2: note: Possible overload variants: a.py:2: note: def f(x: C) -> None -a.py:2: note: def [c.T <: str] f(x: c.T) -> c.T +a.py:2: note: def [c.T: str] f(x: c.T) -> c.T [case testOverloadsGenericToNonGeneric] import a @@ -10380,6 +10380,7 @@ from b import C, D A = C | D a: A reveal_type(a) +[builtins fixtures/type.pyi] [file b.py] C = int diff --git a/test-data/unit/fixtures/isinstance.pyi b/test-data/unit/fixtures/isinstance.pyi index c1125c24b941e..12cef2035c2b8 100644 --- a/test-data/unit/fixtures/isinstance.pyi +++ b/test-data/unit/fixtures/isinstance.pyi @@ -7,6 +7,7 @@ class object: class type: def __init__(self, x) -> None: pass + def __or__(self, other: type) -> type: pass class tuple(Generic[T]): pass diff --git a/test-data/unit/fixtures/isinstance_python3_10.pyi b/test-data/unit/fixtures/isinstance_python3_10.pyi index 7c919a216bfbd..0918d10ab1ef1 100644 --- a/test-data/unit/fixtures/isinstance_python3_10.pyi +++ b/test-data/unit/fixtures/isinstance_python3_10.pyi @@ -7,15 +7,15 @@ T = TypeVar('T') class object: def __init__(self) -> None: pass -class type(Generic[T]): +class type: def __init__(self, x) -> None: pass - def __or__(self, x) -> types.Union: pass + def __or__(self, x) -> types.UnionType: pass class 
tuple(Generic[T]): pass class function: pass -def isinstance(x: object, t: Union[Type[object], Tuple[Type[object], ...], types.Union]) -> bool: pass +def isinstance(x: object, t: Union[Type[object], Tuple[Type[object], ...], types.UnionType]) -> bool: pass def issubclass(x: object, t: Union[Type[object], Tuple[Type[object], ...]]) -> bool: pass class int: diff --git a/test-data/unit/fixtures/tuple.pyi b/test-data/unit/fixtures/tuple.pyi index eb89de8c86ef4..3b62d7fc1513c 100644 --- a/test-data/unit/fixtures/tuple.pyi +++ b/test-data/unit/fixtures/tuple.pyi @@ -3,8 +3,8 @@ import _typeshed from typing import Iterable, Iterator, TypeVar, Generic, Sequence, Optional, overload, Tuple, Type -T = TypeVar("T") -Tco = TypeVar('Tco', covariant=True) +_T = TypeVar("_T") +_Tco = TypeVar('_Tco', covariant=True) class object: def __init__(self) -> None: pass @@ -12,17 +12,17 @@ class object: class type: def __init__(self, *a: object) -> None: pass def __call__(self, *a: object) -> object: pass -class tuple(Sequence[Tco], Generic[Tco]): - def __new__(cls: Type[T], iterable: Iterable[Tco] = ...) -> T: ... - def __iter__(self) -> Iterator[Tco]: pass +class tuple(Sequence[_Tco], Generic[_Tco]): + def __new__(cls: Type[_T], iterable: Iterable[_Tco] = ...) -> _T: ... + def __iter__(self) -> Iterator[_Tco]: pass def __contains__(self, item: object) -> bool: pass @overload - def __getitem__(self, x: int) -> Tco: pass + def __getitem__(self, x: int) -> _Tco: pass @overload - def __getitem__(self, x: slice) -> Tuple[Tco, ...]: ... - def __mul__(self, n: int) -> Tuple[Tco, ...]: pass - def __rmul__(self, n: int) -> Tuple[Tco, ...]: pass - def __add__(self, x: Tuple[Tco, ...]) -> Tuple[Tco, ...]: pass + def __getitem__(self, x: slice) -> Tuple[_Tco, ...]: ... + def __mul__(self, n: int) -> Tuple[_Tco, ...]: pass + def __rmul__(self, n: int) -> Tuple[_Tco, ...]: pass + def __add__(self, x: Tuple[_Tco, ...]) -> Tuple[_Tco, ...]: pass def count(self, obj: object) -> int: pass class function: __name__: str @@ -40,13 +40,13 @@ class str: pass # For convenience class bytes: pass class bytearray: pass -class list(Sequence[T], Generic[T]): +class list(Sequence[_T], Generic[_T]): @overload - def __getitem__(self, i: int) -> T: ... + def __getitem__(self, i: int) -> _T: ... @overload - def __getitem__(self, s: slice) -> list[T]: ... + def __getitem__(self, s: slice) -> list[_T]: ... def __contains__(self, item: object) -> bool: ... - def __iter__(self) -> Iterator[T]: ... + def __iter__(self) -> Iterator[_T]: ... def isinstance(x: object, t: type) -> bool: pass diff --git a/test-data/unit/fixtures/type.pyi b/test-data/unit/fixtures/type.pyi index 39357a693638b..0d93b2e1fcd66 100644 --- a/test-data/unit/fixtures/type.pyi +++ b/test-data/unit/fixtures/type.pyi @@ -1,6 +1,8 @@ # builtins stub used in type-related test cases. from typing import Any, Generic, TypeVar, List, Union +import sys +import types T = TypeVar("T") S = TypeVar("S") @@ -11,7 +13,7 @@ class object: class list(Generic[T]): pass -class type(Generic[T]): +class type: __name__: str def __call__(self, *args: Any, **kwargs: Any) -> Any: pass def __or__(self, other: Union[type, None]) -> type: pass @@ -25,3 +27,9 @@ class bool: pass class int: pass class str: pass class ellipsis: pass +class float: pass + +if sys.version_info >= (3, 10): # type: ignore + def isinstance(obj: object, class_or_tuple: type | types.UnionType, /) -> bool: ... +else: + def isinstance(obj: object, class_or_tuple: type, /) -> bool: ... 
diff --git a/test-data/unit/fixtures/typing-async.pyi b/test-data/unit/fixtures/typing-async.pyi index 9897dfd0b2701..03728f8223162 100644 --- a/test-data/unit/fixtures/typing-async.pyi +++ b/test-data/unit/fixtures/typing-async.pyi @@ -10,7 +10,7 @@ from abc import abstractmethod, ABCMeta cast = 0 overload = 0 -Any = 0 +Any = object() Union = 0 Optional = 0 TypeVar = 0 @@ -125,3 +125,5 @@ class AsyncContextManager(Generic[T]): def __aenter__(self) -> Awaitable[T]: pass # Use Any because not all the precise types are in the fixtures. def __aexit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> Awaitable[Any]: pass + +class _SpecialForm: pass diff --git a/test-data/unit/fixtures/typing-full.pyi b/test-data/unit/fixtures/typing-full.pyi index f7da75fa4cd0b..8e0116aab1c29 100644 --- a/test-data/unit/fixtures/typing-full.pyi +++ b/test-data/unit/fixtures/typing-full.pyi @@ -10,31 +10,37 @@ from abc import abstractmethod, ABCMeta class GenericMeta(type): pass -class _SpecialForm: ... -class TypeVar: ... +class _SpecialForm: + def __getitem__(self, index: Any) -> Any: ... + def __or__(self, other): ... + def __ror__(self, other): ... +class TypeVar: + def __init__(self, name, *args, bound=None): ... + def __or__(self, other): ... class ParamSpec: ... class TypeVarTuple: ... def cast(t, o): ... def assert_type(o, t): ... overload = 0 -Any = 0 -Union = 0 +Any = object() Optional = 0 Generic = 0 Protocol = 0 Tuple = 0 -Callable = 0 _promote = 0 Type = 0 no_type_check = 0 ClassVar = 0 Final = 0 -Literal = 0 TypedDict = 0 NoReturn = 0 NewType = 0 Self = 0 +Unpack = 0 +Callable: _SpecialForm +Union: _SpecialForm +Literal: _SpecialForm T = TypeVar('T') T_co = TypeVar('T_co', covariant=True) @@ -210,3 +216,6 @@ class TypeAliasType: def __init__( self, name: str, value: Any, *, type_params: Tuple[Union[TypeVar, ParamSpec, TypeVarTuple], ...] = () ) -> None: ... + + def __or__(self, other: Any) -> Any: ... + def __ror__(self, other: Any) -> Any: ... diff --git a/test-data/unit/fixtures/typing-medium.pyi b/test-data/unit/fixtures/typing-medium.pyi index c19c5d5d96e20..c722a9ddb12c8 100644 --- a/test-data/unit/fixtures/typing-medium.pyi +++ b/test-data/unit/fixtures/typing-medium.pyi @@ -8,7 +8,7 @@ cast = 0 overload = 0 -Any = 0 +Any = object() Union = 0 Optional = 0 TypeVar = 0 diff --git a/test-data/unit/fixtures/typing-namedtuple.pyi b/test-data/unit/fixtures/typing-namedtuple.pyi index f4744575fc092..bcdcfc44c3d21 100644 --- a/test-data/unit/fixtures/typing-namedtuple.pyi +++ b/test-data/unit/fixtures/typing-namedtuple.pyi @@ -1,6 +1,6 @@ TypeVar = 0 Generic = 0 -Any = 0 +Any = object() overload = 0 Type = 0 Literal = 0 @@ -26,3 +26,5 @@ class NamedTuple(tuple[Any, ...]): def __init__(self, typename: str, fields: Iterable[tuple[str, Any]] = ...) -> None: ... @overload def __init__(self, typename: str, fields: None = None, **kwargs: Any) -> None: ... + +class _SpecialForm: pass diff --git a/test-data/unit/fixtures/typing-override.pyi b/test-data/unit/fixtures/typing-override.pyi index 606ca63d4f0d8..e9d2dfcf55c45 100644 --- a/test-data/unit/fixtures/typing-override.pyi +++ b/test-data/unit/fixtures/typing-override.pyi @@ -1,6 +1,6 @@ TypeVar = 0 Generic = 0 -Any = 0 +Any = object() overload = 0 Type = 0 Literal = 0 @@ -21,5 +21,6 @@ class Mapping(Iterable[KT], Generic[KT, T_co]): def keys(self) -> Iterable[T]: pass # Approximate return type def __getitem__(self, key: T) -> T_co: pass - def override(__arg: T) -> T: ... 
+ +class _SpecialForm: pass diff --git a/test-data/unit/fixtures/typing-typeddict-iror.pyi b/test-data/unit/fixtures/typing-typeddict-iror.pyi index e452c8497109d..845ac6cf208fd 100644 --- a/test-data/unit/fixtures/typing-typeddict-iror.pyi +++ b/test-data/unit/fixtures/typing-typeddict-iror.pyi @@ -12,7 +12,7 @@ from abc import ABCMeta cast = 0 assert_type = 0 overload = 0 -Any = 0 +Any = object() Union = 0 Optional = 0 TypeVar = 0 @@ -64,3 +64,5 @@ class _TypedDict(Mapping[str, object]): def __ror__(self, __value: dict[str, Any]) -> dict[str, object]: ... # supposedly incompatible definitions of __or__ and __ior__ def __ior__(self, __value: Self) -> Self: ... # type: ignore[misc] + +class _SpecialForm: pass diff --git a/test-data/unit/fixtures/typing-typeddict.pyi b/test-data/unit/fixtures/typing-typeddict.pyi index 24a2f13289812..d136ac4ab8bee 100644 --- a/test-data/unit/fixtures/typing-typeddict.pyi +++ b/test-data/unit/fixtures/typing-typeddict.pyi @@ -11,7 +11,7 @@ from abc import ABCMeta cast = 0 assert_type = 0 overload = 0 -Any = 0 +Any = object() Union = 0 Optional = 0 TypeVar = 0 @@ -71,3 +71,5 @@ class _TypedDict(Mapping[str, object]): def pop(self, k: NoReturn, default: T = ...) -> object: ... def update(self: T, __m: T) -> None: ... def __delitem__(self, k: NoReturn) -> None: ... + +class _SpecialForm: pass diff --git a/test-data/unit/lib-stub/enum.pyi b/test-data/unit/lib-stub/enum.pyi index 11adfc5979552..0e0b8e025d9f2 100644 --- a/test-data/unit/lib-stub/enum.pyi +++ b/test-data/unit/lib-stub/enum.pyi @@ -48,3 +48,13 @@ class auto(IntFlag): # It is python-3.11+ only: class StrEnum(str, Enum): def __new__(cls: Type[_T], value: str | _T) -> _T: ... + +# It is python-3.11+ only: +class nonmember(Generic[_T]): + value: _T + def __init__(self, value: _T) -> None: ... + +# It is python-3.11+ only: +class member(Generic[_T]): + value: _T + def __init__(self, value: _T) -> None: ... diff --git a/test-data/unit/lib-stub/types.pyi b/test-data/unit/lib-stub/types.pyi index 012fd85033779..3f713c31e4174 100644 --- a/test-data/unit/lib-stub/types.pyi +++ b/test-data/unit/lib-stub/types.pyi @@ -9,9 +9,13 @@ class ModuleType: __file__: str def __getattr__(self, name: str) -> Any: pass -if sys.version_info >= (3, 10): - class Union: - def __or__(self, x) -> Union: ... +class GenericAlias: + def __or__(self, o): ... + def __ror__(self, o): ... +if sys.version_info >= (3, 10): class NoneType: ... + + class UnionType: + def __or__(self, x) -> UnionType: ... 
diff --git a/test-data/unit/lib-stub/typing.pyi b/test-data/unit/lib-stub/typing.pyi index 5f458ca687c0f..3cb164140883d 100644 --- a/test-data/unit/lib-stub/typing.pyi +++ b/test-data/unit/lib-stub/typing.pyi @@ -11,7 +11,7 @@ cast = 0 assert_type = 0 overload = 0 -Any = 0 +Any = object() Union = 0 Optional = 0 TypeVar = 0 @@ -63,3 +63,5 @@ class Coroutine(Awaitable[V], Generic[T, U, V]): pass def final(meth: T) -> T: pass def reveal_type(__obj: T) -> T: pass + +class _SpecialForm: pass diff --git a/test-data/unit/parse-python312.test b/test-data/unit/parse-python312.test index 28204ccd647bb..90ee96f38deb4 100644 --- a/test-data/unit/parse-python312.test +++ b/test-data/unit/parse-python312.test @@ -7,9 +7,12 @@ MypyFile:1( NameExpr(A) TypeParam( T) - IndexExpr:2( - NameExpr(C) - NameExpr(T)))) + LambdaExpr:2( + Block:-1( + ReturnStmt:2( + IndexExpr:2( + NameExpr(C) + NameExpr(T))))))) [case testPEP695GenericFunction] # mypy: enable-incomplete-feature=NewGenericSyntax diff --git a/test-data/unit/plugins/add_method.py b/test-data/unit/plugins/add_method.py new file mode 100644 index 0000000000000..f3a7ebdb95ed7 --- /dev/null +++ b/test-data/unit/plugins/add_method.py @@ -0,0 +1,23 @@ +from __future__ import annotations + +from typing import Callable + +from mypy.plugin import ClassDefContext, Plugin +from mypy.plugins.common import add_method +from mypy.types import NoneType + + +class AddOverrideMethodPlugin(Plugin): + def get_class_decorator_hook_2(self, fullname: str) -> Callable[[ClassDefContext], bool] | None: + if fullname == "__main__.inject_foo": + return add_extra_methods_hook + return None + + +def add_extra_methods_hook(ctx: ClassDefContext) -> bool: + add_method(ctx, "foo_implicit", [], NoneType()) + return True + + +def plugin(version: str) -> type[AddOverrideMethodPlugin]: + return AddOverrideMethodPlugin diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index a76d3abd71148..222430c3ef550 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -444,7 +444,7 @@ False [case testOverlappingOperatorMethods] class X: pass class A: - def __add__(self, x) -> int: + def __add__(self, x: object) -> int: if isinstance(x, X): return 1 return NotImplemented @@ -1606,8 +1606,8 @@ class Foo(Enum): Bar: Foo = Callable[[str], None] Baz: Foo = Callable[[Dict[str, "Missing"]], None] [out] -_testEnumValueWithPlaceholderNodeType.py:5: error: Incompatible types in assignment (expression has type "object", variable has type "Foo") -_testEnumValueWithPlaceholderNodeType.py:6: error: Incompatible types in assignment (expression has type "object", variable has type "Foo") +_testEnumValueWithPlaceholderNodeType.py:5: error: Incompatible types in assignment (expression has type "", variable has type "Foo") +_testEnumValueWithPlaceholderNodeType.py:6: error: Incompatible types in assignment (expression has type "", variable has type "Foo") _testEnumValueWithPlaceholderNodeType.py:6: error: Name "Missing" is not defined [case testTypeshedRecursiveTypesExample] @@ -1781,9 +1781,9 @@ C = str | int D: TypeAlias = str | int [out] _testTypeAliasNotSupportedWithNewStyleUnion.py:3: error: Invalid type alias: expression is not a valid type -_testTypeAliasNotSupportedWithNewStyleUnion.py:3: error: The type "Type[type]" is not generic and not indexable +_testTypeAliasNotSupportedWithNewStyleUnion.py:3: error: Unsupported left operand type for | ("") _testTypeAliasNotSupportedWithNewStyleUnion.py:4: error: Invalid type alias: expression is not a valid type 
-_testTypeAliasNotSupportedWithNewStyleUnion.py:4: error: The type "Type[type]" is not generic and not indexable +_testTypeAliasNotSupportedWithNewStyleUnion.py:4: error: Unsupported left operand type for | ("Type[str]") _testTypeAliasNotSupportedWithNewStyleUnion.py:5: error: Invalid type alias: expression is not a valid type _testTypeAliasNotSupportedWithNewStyleUnion.py:5: error: Unsupported left operand type for | ("Type[str]") _testTypeAliasNotSupportedWithNewStyleUnion.py:6: error: Invalid type alias: expression is not a valid type @@ -1942,13 +1942,13 @@ class Bar(Generic[P]): ... def bad(foo: Foo[[int]], bar: Bar[[int]]) -> bool: return foo == bar -def good1(foo1: Foo[[int]], foo2: Foo[[str]]) -> bool: +def bad1(foo1: Foo[[int]], foo2: Foo[[str]]) -> bool: return foo1 == foo2 -def good2(foo1: Foo[[int, str]], foo2: Foo[[int, bytes]]) -> bool: +def bad2(foo1: Foo[[int, str]], foo2: Foo[[int, bytes]]) -> bool: return foo1 == foo2 -def good3(foo1: Foo[[int]], foo2: Foo[[int, int]]) -> bool: +def bad3(foo1: Foo[[int]], foo2: Foo[[int, int]]) -> bool: return foo1 == foo2 def good4(foo1: Foo[[int]], foo2: Foo[[int]]) -> bool: @@ -1971,6 +1971,9 @@ def good9(foo1: Foo[Concatenate[int, P]], foo2: Foo[[int, str, bytes]], *args: P [out] _testStrictEqualitywithParamSpec.py:11: error: Non-overlapping equality check (left operand type: "Foo[[int]]", right operand type: "Bar[[int]]") +_testStrictEqualitywithParamSpec.py:14: error: Non-overlapping equality check (left operand type: "Foo[[int]]", right operand type: "Foo[[str]]") +_testStrictEqualitywithParamSpec.py:17: error: Non-overlapping equality check (left operand type: "Foo[[int, str]]", right operand type: "Foo[[int, bytes]]") +_testStrictEqualitywithParamSpec.py:20: error: Non-overlapping equality check (left operand type: "Foo[[int]]", right operand type: "Foo[[int, int]]") [case testInferenceOfDunderDictOnClassObjects] class Foo: ... @@ -2117,3 +2120,81 @@ def func( a2 = action # Error [out] _testPEP695VarianceInference.py:17: error: Incompatible types in assignment (expression has type "Job[None]", variable has type "Job[int]") + +[case testPEP695TypeAliasWithDifferentTargetTypes] +# flags: --python-version=3.12 --enable-incomplete-feature=NewGenericSyntax +from typing import Any, Callable, List, Literal, TypedDict, overload, TypeAlias, TypeVar, Never + +class C[T]: pass + +class O[T]: + @overload + def __init__(self) -> None: ... + @overload + def __init__(self, x: int) -> None: ... 
+ def __init__(self, x: int = 0) -> None: + pass + +class TD(TypedDict): + x: int + +S = TypeVar("S") +A = list[S] +B: TypeAlias = list[S] + +type A1 = type[int] +type A2 = type[int] | None +type A3 = None | type[int] +type A4 = type[Any] +type A5 = type[C] | None +type A6 = None | type[C] +type A7 = type[O] | None +type A8 = None | type[O] + +type B1[**P, R] = Callable[P, R] | None +type B2[**P, R] = None | Callable[P, R] +type B3 = Callable[[str], int] +type B4 = Callable[..., int] + +type C1 = A1 | None +type C2 = None | A1 + +type D1 = Any | None +type D2 = None | Any + +type E1 = List[int] +type E2 = List[int] | None +type E3 = None | List[int] + +type F1 = Literal[1] +type F2 = Literal['x'] | None +type F3 = None | Literal[True] + +type G1 = tuple[int, Any] +type G2 = tuple[int, Any] | None +type G3 = None | tuple[int, Any] + +type H1 = TD +type H2 = TD | None +type H3 = None | TD + +type I1 = C[int] +type I2 = C[Any] | None +type I3 = None | C[TD] +type I4 = O[int] | None +type I5 = None | O[int] + +type J1[T] = T | None +type J2[T] = None | T +type J3[*Ts] = tuple[*Ts] +type J4[T] = J1[T] | None +type J5[T] = None | J1[T] +type J6[*Ts] = J3[*Ts] | None + +type K1 = A[int] | None +type K2 = None | A[int] +type K3 = B[int] | None +type K4 = None | B[int] + +type L1 = Never +type L2 = list[Never] diff --git a/test-data/unit/semanal-classes.test b/test-data/unit/semanal-classes.test index 951791e234901..b14358509f85a 100644 --- a/test-data/unit/semanal-classes.test +++ b/test-data/unit/semanal-classes.test @@ -585,7 +585,7 @@ MypyFile:1( TupleType( Tuple[builtins.int, builtins.str]) BaseType( - builtins.tuple[builtins.object, ...]) + builtins.tuple[Union[builtins.int, builtins.str], ...]) PassStmt:2())) [case testBaseClassFromIgnoredModule] diff --git a/test-data/unit/semanal-errors.test b/test-data/unit/semanal-errors.test index 269536f868a44..33c8f9b80aa01 100644 --- a/test-data/unit/semanal-errors.test +++ b/test-data/unit/semanal-errors.test @@ -1046,7 +1046,7 @@ T = TypeVar(b'T') # E: TypeVar() expects a string literal as first argument d = TypeVar('D') # E: String argument 1 "D" to TypeVar(...) does not match variable name "d" e = TypeVar('e', int, str, x=1) # E: Unexpected argument to "TypeVar()": "x" f = TypeVar('f', (int, str), int) # E: Type expected -g = TypeVar('g', int) # E: TypeVar cannot have only a single constraint +g = TypeVar('g', int) # E: Type variable must have at least two constrained types h = TypeVar('h', x=(int, str)) # E: Unexpected argument to "TypeVar()": "x" i = TypeVar('i', bound=1) # E: TypeVar "bound" must be a type j = TypeVar('j', covariant=None) # E: TypeVar "covariant" may only be a literal bool diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test index 916e2e3a8e179..5dcb0706a8cbc 100644 --- a/test-data/unit/stubgen.test +++ b/test-data/unit/stubgen.test @@ -4366,3 +4366,33 @@ class Foo(Enum): class Bar(Enum): A = ... B = ... + +[case testGracefullyHandleInvalidOptionalUsage] +from typing import Optional + +x: Optional # invalid +y: Optional[int] # valid +z: Optional[int, str] # invalid +w: Optional[int | str] # valid +r: Optional[type[int | str]] + +X = Optional +Y = Optional[int] +Z = Optional[int, str] +W = Optional[int | str] +R = Optional[type[int | str]] + +[out] +from _typeshed import Incomplete +from typing import Optional + +x: Incomplete +y: int | None +z: Incomplete +w: int | str | None +r: type[int | str] | None +X = Optional +Y = int | None +Z = Incomplete +W = int | str | None +R = type[int | str] | None