From 428b1723285bcb44b74d4986105d631d41c25e97 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Fri, 4 Nov 2022 00:26:07 -0700 Subject: [PATCH 01/16] Clarify docs surrounding install-types (#14003) See #13681 More clearly point out the speed cost and the alternative of installing stub packages directly --- docs/source/running_mypy.rst | 46 ++++++++++++------------------------ 1 file changed, 15 insertions(+), 31 deletions(-) diff --git a/docs/source/running_mypy.rst b/docs/source/running_mypy.rst index a7eb3fc5e1e7..264cec59749e 100644 --- a/docs/source/running_mypy.rst +++ b/docs/source/running_mypy.rst @@ -322,34 +322,27 @@ the library, you will get a message like this: main.py:1: note: Hint: "python3 -m pip install types-PyYAML" main.py:1: note: (or run "mypy --install-types" to install all missing stub packages) -You can resolve the issue by running the suggested pip command or -commands. Alternatively, you can use :option:`--install-types ` to install all known missing stubs: +You can resolve the issue by running the suggested pip commands. +If you're running mypy in CI, you can ensure the presence of any stub packages +you need the same as you would any other test dependency, e.g. by adding them to +the appropriate ``requirements.txt`` file. -.. code-block:: text - - mypy --install-types - -This installs any stub packages that were suggested in the previous -mypy run. You can also use your normal mypy command line with the -extra :option:`--install-types ` option to -install missing stubs at the end of the run (if any were found). - -Use :option:`--install-types ` with -:option:`--non-interactive ` to install all suggested -stub packages without asking for confirmation, *and* type check your -code, in a single command: +Alternatively, add the :option:`--install-types ` +to your mypy command to install all known missing stubs: .. code-block:: text - mypy --install-types --non-interactive src/ + mypy --install-types -This can be useful in Continuous Integration jobs if you'd prefer not -to manage stub packages manually. This is somewhat slower than -explicitly installing stubs before running mypy, since it may type -check your code twice -- the first time to find the missing stubs, and +This is slower than explicitly installing stubs, since if effectively +runs mypy twice -- the first time to find the missing stubs, and the second time to type check your code properly after mypy has -installed the stubs. +installed the stubs. It also can make controlling stub versions harder, +resulting in less reproducible type checking. + +By default, :option:`--install-types ` shows a confirmation prompt. +Use :option:`--non-interactive ` to install all suggested +stub packages without asking for confirmation *and* type check your code: If you've already installed the relevant third-party libraries in an environment other than the one mypy is running in, you can use :option:`--python-executable @@ -394,15 +387,6 @@ this error, try: you must run ``mypy ~/foo-project/src`` (or set the ``MYPYPATH`` to ``~/foo-project/src``. -In some rare cases, you may get the "Cannot find implementation or library -stub for module" error even when the module is installed in your system. -This can happen when the module is both missing type hints and is installed -on your system in an unconventional way. - -In this case, follow the steps above on how to handle -:ref:`missing type hints in third party libraries `. - - .. 
_finding-imports: How imports are found From 796068d06a0ab171d22f7be555c28dfad57db433 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Fri, 4 Nov 2022 11:04:52 +0000 Subject: [PATCH 02/16] running_mypy.rst: Fix typo (#14004) Introduced in #14003 --- docs/source/running_mypy.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/running_mypy.rst b/docs/source/running_mypy.rst index 264cec59749e..3deaf26023fc 100644 --- a/docs/source/running_mypy.rst +++ b/docs/source/running_mypy.rst @@ -334,7 +334,7 @@ to your mypy command to install all known missing stubs: mypy --install-types -This is slower than explicitly installing stubs, since if effectively +This is slower than explicitly installing stubs, since it effectively runs mypy twice -- the first time to find the missing stubs, and the second time to type check your code properly after mypy has installed the stubs. It also can make controlling stub versions harder, From 331b170c5d0e55a11a78a7f60fad5a8a8b5d6f2c Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Fri, 4 Nov 2022 16:03:33 +0000 Subject: [PATCH 03/16] Fix crash on nested unions in recursive types (#14007) Fixes #14000 This will introduce some minor perf penalty, but only for code that actually uses recursive types. --- mypy/typeops.py | 4 ++-- test-data/unit/check-recursive-types.test | 18 ++++++++++++++++++ 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/mypy/typeops.py b/mypy/typeops.py index 7eb1a67b46ea..5b29dc71991b 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -71,13 +71,13 @@ def is_recursive_pair(s: Type, t: Type) -> bool: """ if isinstance(s, TypeAliasType) and s.is_recursive: return ( - isinstance(get_proper_type(t), Instance) + isinstance(get_proper_type(t), (Instance, UnionType)) or isinstance(t, TypeAliasType) and t.is_recursive ) if isinstance(t, TypeAliasType) and t.is_recursive: return ( - isinstance(get_proper_type(s), Instance) + isinstance(get_proper_type(s), (Instance, UnionType)) or isinstance(s, TypeAliasType) and s.is_recursive ) diff --git a/test-data/unit/check-recursive-types.test b/test-data/unit/check-recursive-types.test index a0875c60362c..0d727b109658 100644 --- a/test-data/unit/check-recursive-types.test +++ b/test-data/unit/check-recursive-types.test @@ -808,3 +808,21 @@ def test2() -> Tree2: def test3() -> Tree3: return 42 # E: Incompatible return value type (got "int", expected "Union[str, Tuple[Tree3, Tree3, Tree3]]") [builtins fixtures/tuple.pyi] + +[case testRecursiveDoubleUnionNoCrash] +from typing import Tuple, Union, Callable, Sequence + +K = Union[int, Tuple[Union[int, K]]] +L = Union[int, Callable[[], Union[int, L]]] +M = Union[int, Sequence[Union[int, M]]] + +x: K +x = x +y: L +y = y +z: M +z = z + +x = y # E: Incompatible types in assignment (expression has type "L", variable has type "K") +z = x # OK +[builtins fixtures/tuple.pyi] From 807da2675bfdc7ec434f8d5677c13722a555e8da Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sat, 5 Nov 2022 18:51:59 -0700 Subject: [PATCH 04/16] Fix and optimise overload compatibility checking (#14018) Discovered as part of #14017 --- mypy/subtypes.py | 54 ++++++++++++--------------- test-data/unit/check-classes.test | 59 +++++++++++++++++++++++------- test-data/unit/check-selftype.test | 28 ++++++++++++++ 3 files changed, 96 insertions(+), 45 deletions(-) diff --git a/mypy/subtypes.py b/mypy/subtypes.py index 38fae16e7011..2724379ab878 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -823,9 
+823,8 @@ def visit_overloaded(self, left: Overloaded) -> bool: # Ensure each overload in the right side (the supertype) is accounted for. previous_match_left_index = -1 matched_overloads = set() - possible_invalid_overloads = set() - for right_index, right_item in enumerate(right.items): + for right_item in right.items: found_match = False for left_index, left_item in enumerate(left.items): @@ -834,43 +833,36 @@ def visit_overloaded(self, left: Overloaded) -> bool: # Order matters: we need to make sure that the index of # this item is at least the index of the previous one. if subtype_match and previous_match_left_index <= left_index: - if not found_match: - # Update the index of the previous match. - previous_match_left_index = left_index - found_match = True - matched_overloads.add(left_item) - possible_invalid_overloads.discard(left_item) + previous_match_left_index = left_index + found_match = True + matched_overloads.add(left_index) + break else: # If this one overlaps with the supertype in any way, but it wasn't # an exact match, then it's a potential error. strict_concat = self.options.strict_concatenate if self.options else True - if is_callable_compatible( - left_item, - right_item, - is_compat=self._is_subtype, - ignore_return=True, - ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, - strict_concatenate=strict_concat, - ) or is_callable_compatible( - right_item, - left_item, - is_compat=self._is_subtype, - ignore_return=True, - ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, - strict_concatenate=strict_concat, + if left_index not in matched_overloads and ( + is_callable_compatible( + left_item, + right_item, + is_compat=self._is_subtype, + ignore_return=True, + ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, + strict_concatenate=strict_concat, + ) + or is_callable_compatible( + right_item, + left_item, + is_compat=self._is_subtype, + ignore_return=True, + ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, + strict_concatenate=strict_concat, + ) ): - # If this is an overload that's already been matched, there's no - # problem. - if left_item not in matched_overloads: - possible_invalid_overloads.add(left_item) + return False if not found_match: return False - - if possible_invalid_overloads: - # There were potentially invalid overloads that were never matched to the - # supertype. 
- return False return True elif isinstance(right, UnboundType): return True diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index 4ed44c90f275..42aaa68b5873 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -3872,28 +3872,59 @@ class Super: def foo(self, a: C) -> C: pass class Sub(Super): - @overload # Fail + @overload def foo(self, a: A) -> A: pass @overload def foo(self, a: B) -> C: pass # Fail @overload def foo(self, a: C) -> C: pass + +class Sub2(Super): + @overload + def foo(self, a: B) -> C: pass # Fail + @overload + def foo(self, a: A) -> A: pass + @overload + def foo(self, a: C) -> C: pass + +class Sub3(Super): + @overload + def foo(self, a: A) -> int: pass + @overload + def foo(self, a: A) -> A: pass + @overload + def foo(self, a: C) -> C: pass [builtins fixtures/classmethod.pyi] [out] -tmp/foo.pyi:16: error: Signature of "foo" incompatible with supertype "Super" -tmp/foo.pyi:16: note: Superclass: -tmp/foo.pyi:16: note: @overload -tmp/foo.pyi:16: note: def foo(self, a: A) -> A -tmp/foo.pyi:16: note: @overload -tmp/foo.pyi:16: note: def foo(self, a: C) -> C -tmp/foo.pyi:16: note: Subclass: -tmp/foo.pyi:16: note: @overload -tmp/foo.pyi:16: note: def foo(self, a: A) -> A -tmp/foo.pyi:16: note: @overload -tmp/foo.pyi:16: note: def foo(self, a: B) -> C -tmp/foo.pyi:16: note: @overload -tmp/foo.pyi:16: note: def foo(self, a: C) -> C tmp/foo.pyi:19: error: Overloaded function signature 2 will never be matched: signature 1's parameter type(s) are the same or broader +tmp/foo.pyi:24: error: Signature of "foo" incompatible with supertype "Super" +tmp/foo.pyi:24: note: Superclass: +tmp/foo.pyi:24: note: @overload +tmp/foo.pyi:24: note: def foo(self, a: A) -> A +tmp/foo.pyi:24: note: @overload +tmp/foo.pyi:24: note: def foo(self, a: C) -> C +tmp/foo.pyi:24: note: Subclass: +tmp/foo.pyi:24: note: @overload +tmp/foo.pyi:24: note: def foo(self, a: B) -> C +tmp/foo.pyi:24: note: @overload +tmp/foo.pyi:24: note: def foo(self, a: A) -> A +tmp/foo.pyi:24: note: @overload +tmp/foo.pyi:24: note: def foo(self, a: C) -> C +tmp/foo.pyi:25: error: Overloaded function signatures 1 and 2 overlap with incompatible return types +tmp/foo.pyi:32: error: Signature of "foo" incompatible with supertype "Super" +tmp/foo.pyi:32: note: Superclass: +tmp/foo.pyi:32: note: @overload +tmp/foo.pyi:32: note: def foo(self, a: A) -> A +tmp/foo.pyi:32: note: @overload +tmp/foo.pyi:32: note: def foo(self, a: C) -> C +tmp/foo.pyi:32: note: Subclass: +tmp/foo.pyi:32: note: @overload +tmp/foo.pyi:32: note: def foo(self, a: A) -> int +tmp/foo.pyi:32: note: @overload +tmp/foo.pyi:32: note: def foo(self, a: A) -> A +tmp/foo.pyi:32: note: @overload +tmp/foo.pyi:32: note: def foo(self, a: C) -> C +tmp/foo.pyi:35: error: Overloaded function signature 2 will never be matched: signature 1's parameter type(s) are the same or broader [case testTypeTypeOverlapsWithObjectAndType] from foo import * diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test index 506e8bfe8ab1..bfb0eb5a4d89 100644 --- a/test-data/unit/check-selftype.test +++ b/test-data/unit/check-selftype.test @@ -128,6 +128,34 @@ reveal_type(cast(A, C()).copy()) # N: Revealed type is "__main__.A" [builtins fixtures/bool.pyi] +[case testSelfTypeOverrideCompatibility] +from typing import overload, TypeVar, Generic + +T = TypeVar("T") + +class A(Generic[T]): + @overload + def f(self: A[int]) -> int: ... + @overload + def f(self: A[str]) -> str: ... + def f(self): ... 
+ +class B(A[T]): + @overload + def f(self: A[int]) -> int: ... + @overload + def f(self: A[str]) -> str: ... + def f(self): ... + +class B2(A[T]): + @overload + def f(self: A[int]) -> int: ... + @overload + def f(self: A[str]) -> str: ... + @overload + def f(self: A[bytes]) -> bytes: ... + def f(self): ... + [case testSelfTypeSuper] from typing import TypeVar, cast From e8de6d1fc5c908e738f69494de38ea191fb12e60 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Mon, 7 Nov 2022 10:34:35 +0000 Subject: [PATCH 05/16] Add support for exception groups and except* (#14020) Ref #12840 It looks like from the point of view of type checking support is quite easy. Mypyc support however requires some actual work, so I don't include it in this PR. --- mypy/checker.py | 37 +++++++++++++++---- mypy/fastparse.py | 2 +- mypy/message_registry.py | 3 ++ mypy/nodes.py | 7 ++-- mypy/strconv.py | 2 ++ mypy/treetransform.py | 4 ++- mypyc/irbuild/statement.py | 2 ++ test-data/unit/check-python311.test | 51 +++++++++++++++++++++++++-- test-data/unit/fixtures/exception.pyi | 11 ++++-- 9 files changed, 104 insertions(+), 15 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 8973ade98228..5744a4ef4937 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -4305,7 +4305,7 @@ def visit_try_without_finally(self, s: TryStmt, try_frame: bool) -> None: with self.binder.frame_context(can_skip=True, fall_through=4): typ = s.types[i] if typ: - t = self.check_except_handler_test(typ) + t = self.check_except_handler_test(typ, s.is_star) var = s.vars[i] if var: # To support local variables, we make this a definition line, @@ -4325,7 +4325,7 @@ def visit_try_without_finally(self, s: TryStmt, try_frame: bool) -> None: if s.else_body: self.accept(s.else_body) - def check_except_handler_test(self, n: Expression) -> Type: + def check_except_handler_test(self, n: Expression, is_star: bool) -> Type: """Type check an exception handler test clause.""" typ = self.expr_checker.accept(n) @@ -4341,22 +4341,47 @@ def check_except_handler_test(self, n: Expression) -> Type: item = ttype.items[0] if not item.is_type_obj(): self.fail(message_registry.INVALID_EXCEPTION_TYPE, n) - return AnyType(TypeOfAny.from_error) - exc_type = item.ret_type + return self.default_exception_type(is_star) + exc_type = erase_typevars(item.ret_type) elif isinstance(ttype, TypeType): exc_type = ttype.item else: self.fail(message_registry.INVALID_EXCEPTION_TYPE, n) - return AnyType(TypeOfAny.from_error) + return self.default_exception_type(is_star) if not is_subtype(exc_type, self.named_type("builtins.BaseException")): self.fail(message_registry.INVALID_EXCEPTION_TYPE, n) - return AnyType(TypeOfAny.from_error) + return self.default_exception_type(is_star) all_types.append(exc_type) + if is_star: + new_all_types: list[Type] = [] + for typ in all_types: + if is_proper_subtype(typ, self.named_type("builtins.BaseExceptionGroup")): + self.fail(message_registry.INVALID_EXCEPTION_GROUP, n) + new_all_types.append(AnyType(TypeOfAny.from_error)) + else: + new_all_types.append(typ) + return self.wrap_exception_group(new_all_types) return make_simplified_union(all_types) + def default_exception_type(self, is_star: bool) -> Type: + """Exception type to return in case of a previous type error.""" + any_type = AnyType(TypeOfAny.from_error) + if is_star: + return self.named_generic_type("builtins.ExceptionGroup", [any_type]) + return any_type + + def wrap_exception_group(self, types: Sequence[Type]) -> Type: + """Transform except* variable type into an appropriate 
exception group.""" + arg = make_simplified_union(types) + if is_subtype(arg, self.named_type("builtins.Exception")): + base = "builtins.ExceptionGroup" + else: + base = "builtins.BaseExceptionGroup" + return self.named_generic_type(base, [arg]) + def get_types_from_except_handler(self, typ: Type, n: Expression) -> list[Type]: """Helper for check_except_handler_test to retrieve handler types.""" typ = get_proper_type(typ) diff --git a/mypy/fastparse.py b/mypy/fastparse.py index 0d42ef53f456..209ebb89f36b 100644 --- a/mypy/fastparse.py +++ b/mypy/fastparse.py @@ -1254,7 +1254,6 @@ def visit_Try(self, n: ast3.Try) -> TryStmt: return self.set_line(node, n) def visit_TryStar(self, n: TryStar) -> TryStmt: - # TODO: we treat TryStar exactly like Try, which makes mypy not crash. See #12840 vs = [ self.set_line(NameExpr(h.name), h) if h.name is not None else None for h in n.handlers ] @@ -1269,6 +1268,7 @@ def visit_TryStar(self, n: TryStar) -> TryStmt: self.as_block(n.orelse, n.lineno), self.as_block(n.finalbody, n.lineno), ) + node.is_star = True return self.set_line(node, n) # Assert(expr test, expr? msg) diff --git a/mypy/message_registry.py b/mypy/message_registry.py index c84ce120dbda..18acb2cd7a71 100644 --- a/mypy/message_registry.py +++ b/mypy/message_registry.py @@ -44,6 +44,9 @@ def with_additional_msg(self, info: str) -> ErrorMessage: NO_RETURN_EXPECTED: Final = ErrorMessage("Return statement in function which does not return") INVALID_EXCEPTION: Final = ErrorMessage("Exception must be derived from BaseException") INVALID_EXCEPTION_TYPE: Final = ErrorMessage("Exception type must be derived from BaseException") +INVALID_EXCEPTION_GROUP: Final = ErrorMessage( + "Exception type in except* cannot derive from BaseExceptionGroup" +) RETURN_IN_ASYNC_GENERATOR: Final = ErrorMessage( '"return" with value in async generator is not allowed' ) diff --git a/mypy/nodes.py b/mypy/nodes.py index 9221ec48aa61..0ea89611dc1a 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -1529,9 +1529,9 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class TryStmt(Statement): - __slots__ = ("body", "types", "vars", "handlers", "else_body", "finally_body") + __slots__ = ("body", "types", "vars", "handlers", "else_body", "finally_body", "is_star") - __match_args__ = ("body", "types", "vars", "handlers", "else_body", "finally_body") + __match_args__ = ("body", "types", "vars", "handlers", "else_body", "finally_body", "is_star") body: Block # Try body # Plain 'except:' also possible @@ -1540,6 +1540,8 @@ class TryStmt(Statement): handlers: list[Block] # Except bodies else_body: Block | None finally_body: Block | None + # Whether this is try ... 
except* (added in Python 3.11) + is_star: bool def __init__( self, @@ -1557,6 +1559,7 @@ def __init__( self.handlers = handlers self.else_body = else_body self.finally_body = finally_body + self.is_star = False def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_try_stmt(self) diff --git a/mypy/strconv.py b/mypy/strconv.py index 1acf7699316c..9b369618b88e 100644 --- a/mypy/strconv.py +++ b/mypy/strconv.py @@ -276,6 +276,8 @@ def visit_del_stmt(self, o: mypy.nodes.DelStmt) -> str: def visit_try_stmt(self, o: mypy.nodes.TryStmt) -> str: a: list[Any] = [o.body] + if o.is_star: + a.append("*") for i in range(len(o.vars)): a.append(o.types[i]) diff --git a/mypy/treetransform.py b/mypy/treetransform.py index d7f159d02a22..c863db6b3dd5 100644 --- a/mypy/treetransform.py +++ b/mypy/treetransform.py @@ -373,7 +373,7 @@ def visit_raise_stmt(self, node: RaiseStmt) -> RaiseStmt: return RaiseStmt(self.optional_expr(node.expr), self.optional_expr(node.from_expr)) def visit_try_stmt(self, node: TryStmt) -> TryStmt: - return TryStmt( + new = TryStmt( self.block(node.body), self.optional_names(node.vars), self.optional_expressions(node.types), @@ -381,6 +381,8 @@ def visit_try_stmt(self, node: TryStmt) -> TryStmt: self.optional_block(node.else_body), self.optional_block(node.finally_body), ) + new.is_star = node.is_star + return new def visit_with_stmt(self, node: WithStmt) -> WithStmt: new = WithStmt( diff --git a/mypyc/irbuild/statement.py b/mypyc/irbuild/statement.py index 371a305e67b9..a1d36c011aa1 100644 --- a/mypyc/irbuild/statement.py +++ b/mypyc/irbuild/statement.py @@ -616,6 +616,8 @@ def transform_try_stmt(builder: IRBuilder, t: TryStmt) -> None: # constructs that we compile separately. When we have a # try/except/else/finally, we treat the try/except/else as the # body of a try/finally block. + if t.is_star: + builder.error("Exception groups and except* cannot be compiled yet", t.line) if t.finally_body: def transform_try_body() -> None: diff --git a/test-data/unit/check-python311.test b/test-data/unit/check-python311.test index b98bccc9059d..9bf62b0c489d 100644 --- a/test-data/unit/check-python311.test +++ b/test-data/unit/check-python311.test @@ -1,6 +1,53 @@ -[case testTryStarDoesNotCrash] +[case testTryStarSimple] try: pass except* Exception as e: - reveal_type(e) # N: Revealed type is "builtins.Exception" + reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[builtins.Exception]" +[builtins fixtures/exception.pyi] + +[case testTryStarMultiple] +try: + pass +except* Exception as e: + reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[builtins.Exception]" +except* RuntimeError as e: + reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[builtins.RuntimeError]" +[builtins fixtures/exception.pyi] + +[case testTryStarBase] +try: + pass +except* BaseException as e: + reveal_type(e) # N: Revealed type is "builtins.BaseExceptionGroup[builtins.BaseException]" +[builtins fixtures/exception.pyi] + +[case testTryStarTuple] +class Custom(Exception): ... + +try: + pass +except* (RuntimeError, Custom) as e: + reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[Union[builtins.RuntimeError, __main__.Custom]]" +[builtins fixtures/exception.pyi] + +[case testTryStarInvalidType] +class Bad: ... 
+try: + pass +except* (RuntimeError, Bad) as e: # E: Exception type must be derived from BaseException + reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[Any]" +[builtins fixtures/exception.pyi] + +[case testTryStarGroupInvalid] +try: + pass +except* ExceptionGroup as e: # E: Exception type in except* cannot derive from BaseExceptionGroup + reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[Any]" +[builtins fixtures/exception.pyi] + +[case testTryStarGroupInvalidTuple] +try: + pass +except* (RuntimeError, ExceptionGroup) as e: # E: Exception type in except* cannot derive from BaseExceptionGroup + reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[Union[builtins.RuntimeError, Any]]" [builtins fixtures/exception.pyi] diff --git a/test-data/unit/fixtures/exception.pyi b/test-data/unit/fixtures/exception.pyi index bf6d21c8716e..1c88723e7191 100644 --- a/test-data/unit/fixtures/exception.pyi +++ b/test-data/unit/fixtures/exception.pyi @@ -1,3 +1,4 @@ +import sys from typing import Generic, TypeVar T = TypeVar('T') @@ -5,7 +6,8 @@ class object: def __init__(self): pass class type: pass -class tuple(Generic[T]): pass +class tuple(Generic[T]): + def __ge__(self, other: object) -> bool: ... class function: pass class int: pass class str: pass @@ -13,11 +15,14 @@ class unicode: pass class bool: pass class ellipsis: pass -# Note: this is a slight simplification. In Python 2, the inheritance hierarchy -# is actually Exception -> StandardError -> RuntimeError -> ... class BaseException: def __init__(self, *args: object) -> None: ... class Exception(BaseException): pass class RuntimeError(Exception): pass class NotImplementedError(RuntimeError): pass +if sys.version_info >= (3, 11): + _BT_co = TypeVar("_BT_co", bound=BaseException, covariant=True) + _T_co = TypeVar("_T_co", bound=Exception, covariant=True) + class BaseExceptionGroup(BaseException, Generic[_BT_co]): ... + class ExceptionGroup(BaseExceptionGroup[_T_co], Exception): ... From d2a3e667bf0bedb19a1e88baaf7f3ffebc12c74a Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Mon, 7 Nov 2022 05:28:37 -0800 Subject: [PATCH 06/16] Fix crash with PartialTypes and the enum plugin (#14021) Fixes #12109. The original issue reported that the bug had to do with the use of the `--follow-imports=skip` flag. However, it turned out this was a red herring after closer inspection: I was able to trigger a more minimal repro both with and without this flag: ```python from enum import Enum class Foo(Enum): a = [] # E: Need type annotation for "a" (hint: "a: List[] = ...") b = None def check(self) -> None: reveal_type(Foo.a.value) # N: Revealed type is "" reveal_type(Foo.b.value) # N: Revealed type is "" ``` The first two `reveal_types` demonstrate the crux of the bug: the enum plugin does not correctly handle and convert partial types into regular types when inferring the type of the `.value` field. This can then cause any number of downstream problems. For example, suppose we modify `def check(...)` so it runs `reveal_type(self.value)`. Doing this will trigger a crash in mypy because it makes the enum plugin eventually try running `is_equivalent(...)` on the two partial types. But `is_equivalent` does not support partial types, so we crash. I opted to solve this problem by: 1. Making the enum plugin explicitly call the `fixup_partial_types` function on all field types. This prevents the code from crashing. 2. Modifies mypy so that Final vars are never marked as being PartialTypes. 
Without this, `reveal_type(Foo.b.value)` would report a type of `Union[Any, None]` instead of just `None`. (Note that all enum fields are implicitly final). --- mypy/checker.py | 50 ++++++++++++++++------------------ mypy/checkexpr.py | 3 +- mypy/plugins/enums.py | 3 +- mypy/typeops.py | 15 ++++++++++ test-data/unit/check-enum.test | 27 ++++++++++++++++++ 5 files changed, 70 insertions(+), 28 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 5744a4ef4937..3ebc829daa0e 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -159,6 +159,7 @@ erase_to_bound, erase_to_union_or_bound, false_only, + fixup_partial_type, function_type, get_type_vars, is_literal_type_like, @@ -2738,8 +2739,8 @@ def check_assignment( # None initializers preserve the partial None type. return - if is_valid_inferred_type(rvalue_type): - var = lvalue_type.var + var = lvalue_type.var + if is_valid_inferred_type(rvalue_type, is_lvalue_final=var.is_final): partial_types = self.find_partial_types(var) if partial_types is not None: if not self.current_node_deferred: @@ -3687,7 +3688,10 @@ def infer_variable_type( """Infer the type of initialized variables from initializer type.""" if isinstance(init_type, DeletedType): self.msg.deleted_as_rvalue(init_type, context) - elif not is_valid_inferred_type(init_type) and not self.no_partial_types: + elif ( + not is_valid_inferred_type(init_type, is_lvalue_final=name.is_final) + and not self.no_partial_types + ): # We cannot use the type of the initialization expression for full type # inference (it's not specific enough), but we might be able to give # partial type which will be made more specific later. A partial type @@ -6114,7 +6118,7 @@ def enter_partial_types( self.msg.need_annotation_for_var(var, context, self.options.python_version) self.partial_reported.add(var) if var.type: - fixed = self.fixup_partial_type(var.type) + fixed = fixup_partial_type(var.type) var.invalid_partial_type = fixed != var.type var.type = fixed @@ -6145,20 +6149,7 @@ def handle_partial_var_type( else: # Defer the node -- we might get a better type in the outer scope self.handle_cannot_determine_type(node.name, context) - return self.fixup_partial_type(typ) - - def fixup_partial_type(self, typ: Type) -> Type: - """Convert a partial type that we couldn't resolve into something concrete. - - This means, for None we make it Optional[Any], and for anything else we - fill in all of the type arguments with Any. - """ - if not isinstance(typ, PartialType): - return typ - if typ.type is None: - return UnionType.make_union([AnyType(TypeOfAny.unannotated), NoneType()]) - else: - return Instance(typ.type, [AnyType(TypeOfAny.unannotated)] * len(typ.type.type_vars)) + return fixup_partial_type(typ) def is_defined_in_base_class(self, var: Var) -> bool: if var.info: @@ -7006,20 +6997,27 @@ def infer_operator_assignment_method(typ: Type, operator: str) -> tuple[bool, st return False, method -def is_valid_inferred_type(typ: Type) -> bool: - """Is an inferred type valid? +def is_valid_inferred_type(typ: Type, is_lvalue_final: bool = False) -> bool: + """Is an inferred type valid and needs no further refinement? - Examples of invalid types include the None type or List[]. + Examples of invalid types include the None type (when we are not assigning + None to a final lvalue) or List[]. When not doing strict Optional checking, all types containing None are invalid. When doing strict Optional checking, only None and types that are incompletely defined (i.e. contain UninhabitedType) are invalid. 
""" - if isinstance(get_proper_type(typ), (NoneType, UninhabitedType)): - # With strict Optional checking, we *may* eventually infer NoneType when - # the initializer is None, but we only do that if we can't infer a - # specific Optional type. This resolution happens in - # leave_partial_types when we pop a partial types scope. + proper_type = get_proper_type(typ) + if isinstance(proper_type, NoneType): + # If the lvalue is final, we may immediately infer NoneType when the + # initializer is None. + # + # If not, we want to defer making this decision. The final inferred + # type could either be NoneType or an Optional type, depending on + # the context. This resolution happens in leave_partial_types when + # we pop a partial types scope. + return is_lvalue_final + elif isinstance(proper_type, UninhabitedType): return False return not typ.accept(NothingSeeker()) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index ac16f9c9c813..0c392ae755d7 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -111,6 +111,7 @@ custom_special_method, erase_to_union_or_bound, false_only, + fixup_partial_type, function_type, is_literal_type_like, make_simplified_union, @@ -2925,7 +2926,7 @@ def find_partial_type_ref_fast_path(self, expr: Expression) -> Type | None: if isinstance(expr.node, Var): result = self.analyze_var_ref(expr.node, expr) if isinstance(result, PartialType) and result.type is not None: - self.chk.store_type(expr, self.chk.fixup_partial_type(result)) + self.chk.store_type(expr, fixup_partial_type(result)) return result return None diff --git a/mypy/plugins/enums.py b/mypy/plugins/enums.py index 75b301252f06..1acf42d11ee6 100644 --- a/mypy/plugins/enums.py +++ b/mypy/plugins/enums.py @@ -19,7 +19,7 @@ from mypy.nodes import TypeInfo from mypy.semanal_enum import ENUM_BASES from mypy.subtypes import is_equivalent -from mypy.typeops import make_simplified_union +from mypy.typeops import fixup_partial_type, make_simplified_union from mypy.types import CallableType, Instance, LiteralType, ProperType, Type, get_proper_type ENUM_NAME_ACCESS: Final = {f"{prefix}.name" for prefix in ENUM_BASES} | { @@ -77,6 +77,7 @@ def _infer_value_type_with_auto_fallback( """ if proper_type is None: return None + proper_type = get_proper_type(fixup_partial_type(proper_type)) if not (isinstance(proper_type, Instance) and proper_type.type.fullname == "enum.auto"): return proper_type assert isinstance(ctx.type, Instance), "An incorrect ctx.type was passed." diff --git a/mypy/typeops.py b/mypy/typeops.py index 5b29dc71991b..9f224e02c088 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -41,6 +41,7 @@ Overloaded, Parameters, ParamSpecType, + PartialType, ProperType, TupleType, Type, @@ -1016,3 +1017,17 @@ def try_getting_instance_fallback(typ: Type) -> Instance | None: elif isinstance(typ, TypeVarType): return try_getting_instance_fallback(typ.upper_bound) return None + + +def fixup_partial_type(typ: Type) -> Type: + """Convert a partial type that we couldn't resolve into something concrete. + + This means, for None we make it Optional[Any], and for anything else we + fill in all of the type arguments with Any. 
+ """ + if not isinstance(typ, PartialType): + return typ + if typ.type is None: + return UnionType.make_union([AnyType(TypeOfAny.unannotated), NoneType()]) + else: + return Instance(typ.type, [AnyType(TypeOfAny.unannotated)] * len(typ.type.type_vars)) diff --git a/test-data/unit/check-enum.test b/test-data/unit/check-enum.test index 039ddd1621cd..db8643455099 100644 --- a/test-data/unit/check-enum.test +++ b/test-data/unit/check-enum.test @@ -2100,3 +2100,30 @@ class Some: class A(Some, Enum): __labels__ = {1: "1"} [builtins fixtures/dict.pyi] + +[case testEnumWithPartialTypes] +from enum import Enum + +class Mixed(Enum): + a = [] # E: Need type annotation for "a" (hint: "a: List[] = ...") + b = None + + def check(self) -> None: + reveal_type(Mixed.a.value) # N: Revealed type is "builtins.list[Any]" + reveal_type(Mixed.b.value) # N: Revealed type is "None" + + # Inferring Any here instead of a union seems to be a deliberate + # choice; see the testEnumValueInhomogenous case above. + reveal_type(self.value) # N: Revealed type is "Any" + + for field in Mixed: + reveal_type(field.value) # N: Revealed type is "Any" + if field.value is None: + pass + +class AllPartialList(Enum): + a = [] # E: Need type annotation for "a" (hint: "a: List[] = ...") + b = [] # E: Need type annotation for "b" (hint: "b: List[] = ...") + + def check(self) -> None: + reveal_type(self.value) # N: Revealed type is "builtins.list[Any]" From 39d35cdee1bd02a6fb071334273e1c1fb0893066 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 7 Nov 2022 15:38:41 +0000 Subject: [PATCH 07/16] Fix new style union syntax in type aliases (#14008) Fix Python 3.10 `|` union syntax in type aliases, when one of the operands is a type alias or a type with an overloaded `__init__`. We can now infer `typing._SpecialForm` for type aliases in a runtime context. Also create a bunch of minimal test-only stubs for stdlib modules to fix some test failures caused by the missing `typing._SpecialForm` in the default test stubs. This is generally what we want in any case, since using typeshed stubs with minimal builtins/typing stubs can result in unpredictable behavior and slow tests. Fixes #12368. Fixes #12005. Fixes #11426. 
--- mypy/checker.py | 8 ++-- mypy/checkexpr.py | 7 ++- mypy/messages.py | 4 ++ mypyc/test-data/run-async.test | 3 ++ test-data/unit/check-ctypes.test | 13 ++++++ test-data/unit/check-dataclasses.test | 3 +- test-data/unit/check-generics.test | 8 ++-- test-data/unit/check-literal.test | 5 +- test-data/unit/check-python310.test | 16 +++++++ test-data/unit/check-type-aliases.test | 26 ++++++----- test-data/unit/fixtures/args.pyi | 1 + test-data/unit/fixtures/type.pyi | 1 + test-data/unit/fixtures/typing-full.pyi | 2 + test-data/unit/fixtures/typing-medium.pyi | 2 + test-data/unit/lib-stub/_decimal.pyi | 4 ++ test-data/unit/lib-stub/datetime.pyi | 16 +++++++ test-data/unit/lib-stub/decimal.pyi | 3 ++ test-data/unit/lib-stub/functools.pyi | 35 ++++++++++++++ test-data/unit/lib-stub/traceback.pyi | 3 ++ test-data/unit/lib-stub/unannotated_lib.pyi | 1 + test-data/unit/pythoneval.test | 52 +++++++++++++++++++++ 21 files changed, 186 insertions(+), 27 deletions(-) create mode 100644 test-data/unit/lib-stub/_decimal.pyi create mode 100644 test-data/unit/lib-stub/datetime.pyi create mode 100644 test-data/unit/lib-stub/decimal.pyi create mode 100644 test-data/unit/lib-stub/functools.pyi create mode 100644 test-data/unit/lib-stub/traceback.pyi create mode 100644 test-data/unit/lib-stub/unannotated_lib.pyi diff --git a/mypy/checker.py b/mypy/checker.py index 3ebc829daa0e..67d132afe2c7 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -6048,11 +6048,11 @@ def lookup_qualified(self, name: str) -> SymbolTableNode: last = parts[-1] if last in n.names: return n.names[last] - elif len(parts) == 2 and parts[0] == "builtins": - fullname = "builtins." + last + elif len(parts) == 2 and parts[0] in ("builtins", "typing"): + fullname = ".".join(parts) if fullname in SUGGESTED_TEST_FIXTURES: - suggestion = ", e.g. add '[builtins fixtures/{}]' to your test".format( - SUGGESTED_TEST_FIXTURES[fullname] + suggestion = ", e.g. add '[{} fixtures/{}]' to your test".format( + parts[0], SUGGESTED_TEST_FIXTURES[fullname] ) else: suggestion = "" diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 0c392ae755d7..a271fb876bf3 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -3870,9 +3870,8 @@ class LongName(Generic[T]): ... else: if alias_definition: return AnyType(TypeOfAny.special_form) - # This type is invalid in most runtime contexts, give it an 'object' type. - # TODO: Use typing._SpecialForm instead? - return self.named_type("builtins.object") + # The _SpecialForm type can be used in some runtime contexts (e.g. it may have __or__). 
+ return self.named_type("typing._SpecialForm") def apply_type_arguments_to_callable( self, tp: Type, args: Sequence[Type], ctx: Context @@ -4742,7 +4741,7 @@ def has_member(self, typ: Type, member: str) -> bool: typ = typ.fallback if isinstance(typ, Instance): return typ.type.has_readable_member(member) - if isinstance(typ, CallableType) and typ.is_type_obj(): + if isinstance(typ, FunctionLike) and typ.is_type_obj(): return typ.fallback.type.has_readable_member(member) elif isinstance(typ, AnyType): return True diff --git a/mypy/messages.py b/mypy/messages.py index 4e762faa0b32..e11ee9d0f7f2 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -132,6 +132,7 @@ "builtins.isinstance": "isinstancelist.pyi", "builtins.property": "property.pyi", "builtins.classmethod": "classmethod.pyi", + "typing._SpecialForm": "typing-medium.pyi", } @@ -2253,6 +2254,9 @@ def format_literal_value(typ: LiteralType) -> str: if itype.extra_attrs and itype.extra_attrs.mod_name and module_names: return f"{base_str} {itype.extra_attrs.mod_name}" return base_str + if itype.type.fullname == "typing._SpecialForm": + # This is not a real type but used for some typing-related constructs. + return "" if verbosity >= 2 or (fullnames and itype.type.fullname in fullnames): base_str = itype.type.fullname else: diff --git a/mypyc/test-data/run-async.test b/mypyc/test-data/run-async.test index e664ed3bb55a..85ad172d61df 100644 --- a/mypyc/test-data/run-async.test +++ b/mypyc/test-data/run-async.test @@ -13,6 +13,9 @@ async def g() -> int: async def f() -> int: return await g() +[file asyncio/__init__.pyi] +async def sleep(t: float) -> None: ... + [typing fixtures/typing-full.pyi] [file driver.py] diff --git a/test-data/unit/check-ctypes.test b/test-data/unit/check-ctypes.test index 5a350256f8e9..beb1afd779c0 100644 --- a/test-data/unit/check-ctypes.test +++ b/test-data/unit/check-ctypes.test @@ -20,6 +20,7 @@ a[3] = b"bytes" # E: No overload variant of "__setitem__" of "Array" matches ar for x in a: reveal_type(x) # N: Revealed type is "builtins.int" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesArrayCustomElementType] import ctypes @@ -52,6 +53,7 @@ myu: Union[ctypes.Array[ctypes.c_int], List[str]] for myi in myu: reveal_type(myi) # N: Revealed type is "Union[builtins.int, builtins.str]" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesArrayUnionElementType] import ctypes @@ -76,6 +78,7 @@ mya[3] = b"bytes" # E: No overload variant of "__setitem__" of "Array" matches for myx in mya: reveal_type(myx) # N: Revealed type is "Union[__main__.MyCInt, builtins.int]" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesCharArrayAttrs] import ctypes @@ -84,6 +87,7 @@ ca = (ctypes.c_char * 4)(b'a', b'b', b'c', b'\x00') reveal_type(ca.value) # N: Revealed type is "builtins.bytes" reveal_type(ca.raw) # N: Revealed type is "builtins.bytes" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesCharPArrayDoesNotCrash] import ctypes @@ -91,6 +95,7 @@ import ctypes # The following line used to crash with "Could not find builtin symbol 'NoneType'" ca = (ctypes.c_char_p * 0)() [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesWcharArrayAttrs] import ctypes @@ -99,6 +104,7 @@ wca = (ctypes.c_wchar * 4)('a', 'b', 'c', '\x00') reveal_type(wca.value) # N: Revealed type is "builtins.str" wca.raw # E: Array attribute "raw" is only available with element type "c_char", 
not "c_wchar" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesCharUnionArrayAttrs] import ctypes @@ -108,6 +114,7 @@ cua: ctypes.Array[Union[ctypes.c_char, ctypes.c_wchar]] reveal_type(cua.value) # N: Revealed type is "Union[builtins.bytes, builtins.str]" cua.raw # E: Array attribute "raw" is only available with element type "c_char", not "Union[c_char, c_wchar]" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesAnyUnionArrayAttrs] import ctypes @@ -117,6 +124,7 @@ caa: ctypes.Array[Union[ctypes.c_char, Any]] reveal_type(caa.value) # N: Revealed type is "Union[builtins.bytes, Any]" reveal_type(caa.raw) # N: Revealed type is "builtins.bytes" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesOtherUnionArrayAttrs] import ctypes @@ -126,6 +134,7 @@ cua: ctypes.Array[Union[ctypes.c_char, ctypes.c_int]] cua.value # E: Array attribute "value" is only available with element type "c_char" or "c_wchar", not "Union[c_char, c_int]" cua.raw # E: Array attribute "raw" is only available with element type "c_char", not "Union[c_char, c_int]" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesAnyArrayAttrs] import ctypes @@ -134,6 +143,7 @@ aa: ctypes.Array[Any] reveal_type(aa.value) # N: Revealed type is "Any" reveal_type(aa.raw) # N: Revealed type is "builtins.bytes" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesOtherArrayAttrs] import ctypes @@ -142,6 +152,7 @@ oa = (ctypes.c_int * 4)(1, 2, 3, 4) oa.value # E: Array attribute "value" is only available with element type "c_char" or "c_wchar", not "c_int" oa.raw # E: Array attribute "raw" is only available with element type "c_char", not "c_int" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesArrayConstructorStarargs] import ctypes @@ -154,6 +165,7 @@ reveal_type(intarr4(*int_values)) # N: Revealed type is "ctypes.Array[ctypes.c_ reveal_type(intarr4(*c_int_values)) # N: Revealed type is "ctypes.Array[ctypes.c_int]" reveal_type(intarr6(1, ctypes.c_int(2), *int_values)) # N: Revealed type is "ctypes.Array[ctypes.c_int]" reveal_type(intarr6(1, ctypes.c_int(2), *c_int_values)) # N: Revealed type is "ctypes.Array[ctypes.c_int]" +[typing fixtures/typing-medium.pyi] float_values = [1.0, 2.0, 3.0, 4.0] intarr4(*float_values) # E: Array constructor argument 1 of type "List[float]" is not convertible to the array element type "Iterable[c_int]" @@ -167,3 +179,4 @@ x = {"a": 1, "b": 2} intarr4(**x) [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test index 3ec4c60e6929..d4064124109b 100644 --- a/test-data/unit/check-dataclasses.test +++ b/test-data/unit/check-dataclasses.test @@ -633,8 +633,9 @@ class Two: c = Two() x = c.S -reveal_type(x) # N: Revealed type is "builtins.object" +reveal_type(x) # N: Revealed type is "typing._SpecialForm" [builtins fixtures/dataclasses.pyi] +[typing fixtures/typing-medium.pyi] [case testDataclassOrdering] # flags: --python-version 3.7 diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test index b7d98a783a49..7df52b60fc0b 100644 --- a/test-data/unit/check-generics.test +++ b/test-data/unit/check-generics.test @@ -1049,20 +1049,20 @@ CA = Callable[[T], int] TA = Tuple[T, int] UA = Union[T, int] -cs = CA + 1 # E: Unsupported left operand type for + ("object") +cs = CA + 1 # E: 
Unsupported left operand type for + ("") reveal_type(cs) # N: Revealed type is "Any" -ts = TA() # E: "object" not callable +ts = TA() # E: "" not callable reveal_type(ts) # N: Revealed type is "Any" -us = UA.x # E: "object" has no attribute "x" +us = UA.x # E: "" has no attribute "x" reveal_type(us) # N: Revealed type is "Any" xx = CA[str] + 1 # E: Type application is only supported for generic classes yy = TA[str]() # E: Type application is only supported for generic classes zz = UA[str].x # E: Type application is only supported for generic classes [builtins fixtures/tuple.pyi] - +[typing fixtures/typing-medium.pyi] [out] [case testGenericTypeAliasesTypeVarBinding] diff --git a/test-data/unit/check-literal.test b/test-data/unit/check-literal.test index da8f1570a4f4..ef8c9095e58a 100644 --- a/test-data/unit/check-literal.test +++ b/test-data/unit/check-literal.test @@ -1484,16 +1484,17 @@ Alias = Literal[3] isinstance(3, Literal[3]) # E: Cannot use isinstance() with Literal type isinstance(3, Alias) # E: Cannot use isinstance() with Literal type \ - # E: Argument 2 to "isinstance" has incompatible type "object"; expected "Union[type, Tuple[Any, ...]]" + # E: Argument 2 to "isinstance" has incompatible type ""; expected "Union[type, Tuple[Any, ...]]" isinstance(3, Renamed[3]) # E: Cannot use isinstance() with Literal type isinstance(3, indirect.Literal[3]) # E: Cannot use isinstance() with Literal type issubclass(int, Literal[3]) # E: Cannot use issubclass() with Literal type issubclass(int, Alias) # E: Cannot use issubclass() with Literal type \ - # E: Argument 2 to "issubclass" has incompatible type "object"; expected "Union[type, Tuple[Any, ...]]" + # E: Argument 2 to "issubclass" has incompatible type ""; expected "Union[type, Tuple[Any, ...]]" issubclass(int, Renamed[3]) # E: Cannot use issubclass() with Literal type issubclass(int, indirect.Literal[3]) # E: Cannot use issubclass() with Literal type [builtins fixtures/isinstancelist.pyi] +[typing fixtures/typing-medium.pyi] [out] [case testLiteralErrorsWhenSubclassed] diff --git a/test-data/unit/check-python310.test b/test-data/unit/check-python310.test index 1548d5dadcfd..3b90a910e943 100644 --- a/test-data/unit/check-python310.test +++ b/test-data/unit/check-python310.test @@ -1788,3 +1788,19 @@ def f6(a: object) -> None: case _ if y is not None: # E: Name "y" may be undefined pass [builtins fixtures/tuple.pyi] + +[case testTypeAliasWithNewUnionSyntaxAndNoneLeftOperand] +from typing import overload +class C: + @overload + def __init__(self) -> None: pass + @overload + def __init__(self, x: int) -> None: pass + def __init__(self, x=0): + pass + +class D: pass + +X = None | C +Y = None | D +[builtins fixtures/type.pyi] diff --git a/test-data/unit/check-type-aliases.test b/test-data/unit/check-type-aliases.test index 8dafc8f47a6c..fab372976ab2 100644 --- a/test-data/unit/check-type-aliases.test +++ b/test-data/unit/check-type-aliases.test @@ -821,28 +821,28 @@ c = Child() reveal_type(NormalImplicit) # N: Revealed type is "def () -> __main__.Foo" reveal_type(NormalExplicit) # N: Revealed type is "def () -> __main__.Foo" -reveal_type(SpecialImplicit) # N: Revealed type is "builtins.object" -reveal_type(SpecialExplicit) # N: Revealed type is "builtins.object" +reveal_type(SpecialImplicit) # N: Revealed type is "typing._SpecialForm" +reveal_type(SpecialExplicit) # N: Revealed type is "typing._SpecialForm" reveal_type(Parent.NormalImplicit) # N: Revealed type is "def () -> __main__.Foo" reveal_type(Parent.NormalExplicit) # N: Revealed type 
is "def () -> __main__.Foo" -reveal_type(Parent.SpecialImplicit) # N: Revealed type is "builtins.object" -reveal_type(Parent.SpecialExplicit) # N: Revealed type is "builtins.object" +reveal_type(Parent.SpecialImplicit) # N: Revealed type is "typing._SpecialForm" +reveal_type(Parent.SpecialExplicit) # N: Revealed type is "typing._SpecialForm" reveal_type(Child.NormalImplicit) # N: Revealed type is "def () -> __main__.Foo" reveal_type(Child.NormalExplicit) # N: Revealed type is "def () -> __main__.Foo" -reveal_type(Child.SpecialImplicit) # N: Revealed type is "builtins.object" -reveal_type(Child.SpecialExplicit) # N: Revealed type is "builtins.object" +reveal_type(Child.SpecialImplicit) # N: Revealed type is "typing._SpecialForm" +reveal_type(Child.SpecialExplicit) # N: Revealed type is "typing._SpecialForm" reveal_type(p.NormalImplicit) # N: Revealed type is "def () -> __main__.Foo" reveal_type(p.NormalExplicit) # N: Revealed type is "def () -> __main__.Foo" -reveal_type(p.SpecialImplicit) # N: Revealed type is "builtins.object" -reveal_type(p.SpecialExplicit) # N: Revealed type is "builtins.object" +reveal_type(p.SpecialImplicit) # N: Revealed type is "typing._SpecialForm" +reveal_type(p.SpecialExplicit) # N: Revealed type is "typing._SpecialForm" reveal_type(c.NormalImplicit) # N: Revealed type is "def () -> __main__.Foo" reveal_type(p.NormalExplicit) # N: Revealed type is "def () -> __main__.Foo" -reveal_type(c.SpecialImplicit) # N: Revealed type is "builtins.object" -reveal_type(c.SpecialExplicit) # N: Revealed type is "builtins.object" +reveal_type(c.SpecialImplicit) # N: Revealed type is "typing._SpecialForm" +reveal_type(c.SpecialExplicit) # N: Revealed type is "typing._SpecialForm" # Use type aliases in a type alias context in a plausible way @@ -895,6 +895,7 @@ reveal_type(weird_child_2) # N: Revealed type is "def () -> Any" reveal_type(weird_child_3) # N: Revealed type is "def () -> Any" reveal_type(weird_child_4) # N: Revealed type is "def () -> Any" [builtins fixtures/tuple.pyi] +[typing fixtures/typing-medium.pyi] [case testMalformedTypeAliasRuntimeReassignments] from typing import Union @@ -927,8 +928,8 @@ SpecialExplicit = 4 # E: Cannot assign multiple types to name "SpecialExplicit" Parent.NormalImplicit = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "Type[Foo]") Parent.NormalExplicit = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "Type[Foo]") -Parent.SpecialImplicit = 4 -Parent.SpecialExplicit = 4 +Parent.SpecialImplicit = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "") +Parent.SpecialExplicit = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "") Child.NormalImplicit = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "Type[Foo]") Child.NormalExplicit = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "Type[Foo]") @@ -945,3 +946,4 @@ c.NormalExplicit = 4 # E: Incompatible types in assignment (expression has type c.SpecialImplicit = 4 c.SpecialExplicit = 4 [builtins fixtures/tuple.pyi] +[typing fixtures/typing-medium.pyi] diff --git a/test-data/unit/fixtures/args.pyi b/test-data/unit/fixtures/args.pyi index 8d0ecc00f4b6..9985ccf84817 100644 --- a/test-data/unit/fixtures/args.pyi +++ b/test-data/unit/fixtures/args.pyi @@ -26,6 +26,7 @@ class list(Sequence[T], Generic[T]): pass class int: def __eq__(self, o: object) -> bool: pass +class float: 
pass class str: pass class bytes: pass class bool: pass diff --git a/test-data/unit/fixtures/type.pyi b/test-data/unit/fixtures/type.pyi index 755b45ff0bb5..77feb41ba70b 100644 --- a/test-data/unit/fixtures/type.pyi +++ b/test-data/unit/fixtures/type.pyi @@ -13,6 +13,7 @@ class list(Generic[T]): pass class type(Generic[T]): __name__: str def __or__(self, other: Union[type, None]) -> type: pass + def __ror__(self, other: Union[type, None]) -> type: pass def mro(self) -> List['type']: pass class tuple(Generic[T]): pass diff --git a/test-data/unit/fixtures/typing-full.pyi b/test-data/unit/fixtures/typing-full.pyi index c406da986818..04568f7c03f3 100644 --- a/test-data/unit/fixtures/typing-full.pyi +++ b/test-data/unit/fixtures/typing-full.pyi @@ -179,3 +179,5 @@ class _TypedDict(Mapping[str, object]): def pop(self, k: NoReturn, default: T = ...) -> object: ... def update(self: T, __m: T) -> None: ... def __delitem__(self, k: NoReturn) -> None: ... + +class _SpecialForm: pass diff --git a/test-data/unit/fixtures/typing-medium.pyi b/test-data/unit/fixtures/typing-medium.pyi index 568fe057c4cf..863b0703989d 100644 --- a/test-data/unit/fixtures/typing-medium.pyi +++ b/test-data/unit/fixtures/typing-medium.pyi @@ -68,4 +68,6 @@ class ContextManager(Generic[T]): # Use Any because not all the precise types are in the fixtures. def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> Any: pass +class _SpecialForm: pass + TYPE_CHECKING = 1 diff --git a/test-data/unit/lib-stub/_decimal.pyi b/test-data/unit/lib-stub/_decimal.pyi new file mode 100644 index 000000000000..2c2c5bff11f7 --- /dev/null +++ b/test-data/unit/lib-stub/_decimal.pyi @@ -0,0 +1,4 @@ +# Very simplified decimal stubs for use in tests + +class Decimal: + def __new__(cls, value: str = ...) -> Decimal: ... diff --git a/test-data/unit/lib-stub/datetime.pyi b/test-data/unit/lib-stub/datetime.pyi new file mode 100644 index 000000000000..7d71682d051d --- /dev/null +++ b/test-data/unit/lib-stub/datetime.pyi @@ -0,0 +1,16 @@ +# Very simplified datetime stubs for use in tests + +class datetime: + def __new__( + cls, + year: int, + month: int, + day: int, + hour: int = ..., + minute: int = ..., + second: int = ..., + microsecond: int = ..., + *, + fold: int = ..., + ) -> datetime: ... + def __format__(self, __fmt: str) -> str: ... diff --git a/test-data/unit/lib-stub/decimal.pyi b/test-data/unit/lib-stub/decimal.pyi new file mode 100644 index 000000000000..d2ab6eda9ff1 --- /dev/null +++ b/test-data/unit/lib-stub/decimal.pyi @@ -0,0 +1,3 @@ +# Very simplified decimal stubs for use in tests + +from _decimal import * diff --git a/test-data/unit/lib-stub/functools.pyi b/test-data/unit/lib-stub/functools.pyi new file mode 100644 index 000000000000..9e62a14c2f34 --- /dev/null +++ b/test-data/unit/lib-stub/functools.pyi @@ -0,0 +1,35 @@ +from typing import Generic, TypeVar, Callable, Any, Mapping + +_T = TypeVar("_T") + +class _SingleDispatchCallable(Generic[_T]): + registry: Mapping[Any, Callable[..., _T]] + def dispatch(self, cls: Any) -> Callable[..., _T]: ... + # @fun.register(complex) + # def _(arg, verbose=False): ... + @overload + def register(self, cls: type[Any], func: None = ...) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... + # @fun.register + # def _(arg: int, verbose=False): + @overload + def register(self, cls: Callable[..., _T], func: None = ...) -> Callable[..., _T]: ... + # fun.register(int, lambda x: x) + @overload + def register(self, cls: type[Any], func: Callable[..., _T]) -> Callable[..., _T]: ... 
+ def _clear_cache(self) -> None: ... + def __call__(__self, *args: Any, **kwargs: Any) -> _T: ... + +def singledispatch(func: Callable[..., _T]) -> _SingleDispatchCallable[_T]: ... + +def total_ordering(cls: type[_T]) -> type[_T]: ... + +class cached_property(Generic[_T]): + func: Callable[[Any], _T] + attrname: str | None + def __init__(self, func: Callable[[Any], _T]) -> None: ... + @overload + def __get__(self, instance: None, owner: type[Any] | None = ...) -> cached_property[_T]: ... + @overload + def __get__(self, instance: object, owner: type[Any] | None = ...) -> _T: ... + def __set_name__(self, owner: type[Any], name: str) -> None: ... + def __class_getitem__(cls, item: Any) -> Any: ... diff --git a/test-data/unit/lib-stub/traceback.pyi b/test-data/unit/lib-stub/traceback.pyi new file mode 100644 index 000000000000..83c1891f80f5 --- /dev/null +++ b/test-data/unit/lib-stub/traceback.pyi @@ -0,0 +1,3 @@ +# Very simplified traceback stubs for use in tests + +def print_tb(*args, **kwargs) -> None: ... diff --git a/test-data/unit/lib-stub/unannotated_lib.pyi b/test-data/unit/lib-stub/unannotated_lib.pyi new file mode 100644 index 000000000000..90bfb6fa47d6 --- /dev/null +++ b/test-data/unit/lib-stub/unannotated_lib.pyi @@ -0,0 +1 @@ +def f(x): ... diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index 692f62bf6454..f6336b48ee7b 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -1660,3 +1660,55 @@ _testNarrowTypeForDictKeys.py:7: note: Revealed type is "builtins.str" _testNarrowTypeForDictKeys.py:9: note: Revealed type is "Union[builtins.str, None]" _testNarrowTypeForDictKeys.py:14: note: Revealed type is "builtins.str" _testNarrowTypeForDictKeys.py:16: note: Revealed type is "Union[builtins.str, None]" + +[case testTypeAliasWithNewStyleUnion] +# flags: --python-version 3.10 +from typing import Literal, Type, TypeAlias + +Foo = Literal[1, 2] +reveal_type(Foo) +Bar1 = Foo | Literal[3] +Bar2 = Literal[3] | Foo +Bar3 = Foo | Foo | Literal[3] | Foo + +U1 = int | str +U2 = U1 | bytes +U3 = bytes | U1 + +Opt1 = None | int +Opt2 = None | float +Opt3 = int | None +Opt4 = float | None + +A = Type[int] | str +B: TypeAlias = Type[int] | str +[out] +_testTypeAliasWithNewStyleUnion.py:5: note: Revealed type is "typing._SpecialForm" + +[case testTypeAliasWithNewStyleUnionInStub] +# flags: --python-version 3.7 +import m + +[file m.pyi] +from typing import Type +from typing_extensions import Literal, TypeAlias + +Foo = Literal[1, 2] +reveal_type(Foo) +Bar1 = Foo | Literal[3] +Bar2 = Literal[3] | Foo +Bar3 = Foo | Foo | Literal[3] | Foo + +U1 = int | str +U2 = U1 | bytes +U3 = bytes | U1 + +Opt1 = None | int +Opt2 = None | float +Opt3 = int | None +Opt4 = float | None + +A = Type[int] | str +B: TypeAlias = Type[int] | str +[out] +m.pyi:5: note: Revealed type is "typing._SpecialForm" From 7465abd3d2fe4211a8aaa9453b402fea60071561 Mon Sep 17 00:00:00 2001 From: Valentin Stanciu <250871+svalentin@users.noreply.github.com> Date: Tue, 8 Nov 2022 15:22:56 +0200 Subject: [PATCH 08/16] Filter out wasm32 wheel in upload-pypi.py (#14035) Tried to make it in a way such that we can add more rules for platforms we want to filter out in the future. 
Tested it with `python3 misc/upload-pypi.py --dry-run 0.990` Fixes #14026 --- misc/upload-pypi.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/misc/upload-pypi.py b/misc/upload-pypi.py index be8da9e44f86..e60ec3cca207 100644 --- a/misc/upload-pypi.py +++ b/misc/upload-pypi.py @@ -29,6 +29,21 @@ def is_whl_or_tar(name: str) -> bool: return name.endswith(".tar.gz") or name.endswith(".whl") +def item_ok_for_pypi(name: str) -> bool: + if not is_whl_or_tar(name): + return False + + if name.endswith(".tar.gz"): + name = name[:-7] + if name.endswith(".whl"): + name = name[:-4] + + if name.endswith("wasm32"): + return False + + return True + + def get_release_for_tag(tag: str) -> dict[str, Any]: with urlopen(f"{BASE}/{REPO}/releases/tags/{tag}") as f: data = json.load(f) @@ -75,7 +90,7 @@ def check_sdist(dist: Path, version: str) -> None: def spot_check_dist(dist: Path, version: str) -> None: - items = [item for item in dist.iterdir() if is_whl_or_tar(item.name)] + items = [item for item in dist.iterdir() if item_ok_for_pypi(item.name)] assert len(items) > 10 assert all(version in item.name for item in items) assert any(item.name.endswith("py3-none-any.whl") for item in items) @@ -93,7 +108,7 @@ def tmp_twine() -> Iterator[Path]: def upload_dist(dist: Path, dry_run: bool = True) -> None: with tmp_twine() as twine: - files = [item for item in dist.iterdir() if is_whl_or_tar(item.name)] + files = [item for item in dist.iterdir() if item_ok_for_pypi(item.name)] cmd: list[Any] = [twine, "upload"] cmd += files if dry_run: From fa22a49ef1154285d4f251086f85cadf16e54cd9 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Tue, 8 Nov 2022 15:43:57 +0000 Subject: [PATCH 09/16] Fix crash on inference with recursive alias to recursive instance (#14038) Fixes #14031 It turns out premature optimization is the root of all evil. (It turns out this costs us less than 1% time on self-check). --- mypy/constraints.py | 3 ++- mypy/types.py | 6 ++++++ test-data/unit/check-recursive-types.test | 11 +++++++++++ 3 files changed, 19 insertions(+), 1 deletion(-) diff --git a/mypy/constraints.py b/mypy/constraints.py index 49b042d5baf0..2a641bf27ed5 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -177,8 +177,9 @@ def infer_constraints(template: Type, actual: Type, direction: int) -> list[Cons for (t, a) in reversed(TypeState.inferring) ): return [] - if has_recursive_types(template): + if has_recursive_types(template) or isinstance(get_proper_type(template), Instance): # This case requires special care because it may cause infinite recursion. + # Note that we include Instances because the may be recursive as str(Sequence[str]). if not has_type_vars(template): # Return early on an empty branch. 
return [] diff --git a/mypy/types.py b/mypy/types.py index e322cf02505f..2f0feb703f6a 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -3240,6 +3240,12 @@ def __init__(self) -> None: def visit_type_var(self, t: TypeVarType) -> bool: return True + def visit_type_var_tuple(self, t: TypeVarTupleType) -> bool: + return True + + def visit_param_spec(self, t: ParamSpecType) -> bool: + return True + def has_type_vars(typ: Type) -> bool: """Check if a type contains any type variables (recursively).""" diff --git a/test-data/unit/check-recursive-types.test b/test-data/unit/check-recursive-types.test index 0d727b109658..95b0918866f1 100644 --- a/test-data/unit/check-recursive-types.test +++ b/test-data/unit/check-recursive-types.test @@ -826,3 +826,14 @@ z = z x = y # E: Incompatible types in assignment (expression has type "L", variable has type "K") z = x # OK [builtins fixtures/tuple.pyi] + +[case testRecursiveInstanceInferenceNoCrash] +from typing import Sequence, TypeVar, Union + +class C(Sequence[C]): ... + +T = TypeVar("T") +def foo(x: T) -> C: ... + +Nested = Union[C, Sequence[Nested]] +x: Nested = foo(42) From dbcbb3f5c3ef791c98088da0bd1dfa6cbf51f301 Mon Sep 17 00:00:00 2001 From: dosisod <39638017+dosisod@users.noreply.github.com> Date: Tue, 8 Nov 2022 17:57:39 -0800 Subject: [PATCH 10/16] [mypyc] Use tabs instead of spaces in emitted C code (#14016) By using tabs instead of spaces for indentation in the emitted C code we are able to reduce the file size by almost 9%: | File | Size | |------|------| | `build/__native_74cdc94b2b24dafac2a2.c` (spaces) | 86.5MB | | `build/__native_74cdc94b2b24dafac2a2.c` (tabs) | 79.6MB | For this particular file we save about 6.9MB, or 8.7%. I checked, and this has no effect on the compilation speed, which is to be expected. At the very least opening these auto generated files inside an editor will be faster, even if the compilation isn't any faster. I am interested in making mypy/mypyc faster, and this seemed like a low-hanging fruit that could be beneficial. 
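As a rough illustration of where the savings come from (the C statement below is made up; only the indentation arithmetic matters):

```python
line = "r0 = obj->attr;"                    # hypothetical emitted C statement
with_spaces = 2 * (4 * " ") + line + "\n"   # old scheme: 4 spaces per indent level
with_tabs = 2 * "\t" + line + "\n"          # new scheme: 1 tab per indent level
print(len(with_spaces) - len(with_tabs))    # 6 bytes saved on this one line
```

Multiplied across the tens of millions of emitted lines in a large build, that per-line saving is roughly where the ~7MB reduction in the table above comes from.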
--- mypyc/codegen/emit.py | 6 +++--- mypyc/test/test_emit.py | 2 +- mypyc/test/test_emitfunc.py | 6 +++--- mypyc/test/test_emitwrapper.py | 3 ++- 4 files changed, 9 insertions(+), 8 deletions(-) diff --git a/mypyc/codegen/emit.py b/mypyc/codegen/emit.py index 5d47636b4c1e..15dece700a1e 100644 --- a/mypyc/codegen/emit.py +++ b/mypyc/codegen/emit.py @@ -176,10 +176,10 @@ def __init__( # Low-level operations def indent(self) -> None: - self._indent += 4 + self._indent += 1 def dedent(self) -> None: - self._indent -= 4 + self._indent -= 1 assert self._indent >= 0 def label(self, label: BasicBlock) -> str: @@ -194,7 +194,7 @@ def attr(self, name: str) -> str: def emit_line(self, line: str = "") -> None: if line.startswith("}"): self.dedent() - self.fragments.append(self._indent * " " + line + "\n") + self.fragments.append(self._indent * "\t" + line + "\n") if line.endswith("{"): self.indent() diff --git a/mypyc/test/test_emit.py b/mypyc/test/test_emit.py index 7351cd7fb13e..1b624a7a6cdb 100644 --- a/mypyc/test/test_emit.py +++ b/mypyc/test/test_emit.py @@ -28,4 +28,4 @@ def test_emit_line(self) -> None: emitter.emit_line("a {") emitter.emit_line("f();") emitter.emit_line("}") - assert emitter.fragments == ["line;\n", "a {\n", " f();\n", "}\n"] + assert emitter.fragments == ["line;\n", "a {\n", "\tf();\n", "}\n"] diff --git a/mypyc/test/test_emitfunc.py b/mypyc/test/test_emitfunc.py index d7dcf3be532b..3b44f7e444c8 100644 --- a/mypyc/test/test_emitfunc.py +++ b/mypyc/test/test_emitfunc.py @@ -833,7 +833,7 @@ def assert_emit( op.accept(visitor) frags = declarations.fragments + emitter.fragments - actual_lines = [line.strip(" ") for line in frags] + actual_lines = [line.strip(" \t") for line in frags] assert all(line.endswith("\n") for line in actual_lines) actual_lines = [line.rstrip("\n") for line in actual_lines] if not expected.strip(): @@ -900,7 +900,7 @@ def test_simple(self) -> None: " return cpy_r_arg;\n", "}\n", ], - result, + [line.replace("\t", 4 * " ") for line in result], msg="Generated code invalid", ) @@ -927,6 +927,6 @@ def test_register(self) -> None: " CPy_Unreachable();\n", "}\n", ], - result, + [line.replace("\t", 4 * " ") for line in result], msg="Generated code invalid", ) diff --git a/mypyc/test/test_emitwrapper.py b/mypyc/test/test_emitwrapper.py index c4465656444c..ec5adb4c6622 100644 --- a/mypyc/test/test_emitwrapper.py +++ b/mypyc/test/test_emitwrapper.py @@ -56,5 +56,6 @@ def test_check_int(self) -> None: ) def assert_lines(self, expected: list[str], actual: list[str]) -> None: - actual = [line.rstrip("\n") for line in actual] + actual = [line.rstrip("\n").replace(4 * " ", "\t") for line in actual] + expected = [line.replace(4 * " ", "\t") for line in expected] assert_string_arrays_equal(expected, actual, "Invalid output") From 5cc14390337f28e6f90efaf0ad5c4b1d322a8638 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Thu, 10 Nov 2022 17:37:09 +0000 Subject: [PATCH 11/16] Don't ignore errors in files passed on the command line (#14060) #13768 had a bug so that errors were sometimes silenced in files that were under a directory in `sys.path`. `sys.path` sometimes includes the current working directory, resulting in no errors reported at all. Fix it by always reporting errors if a file was passed on the command line (unless *explicitly* silenced). When using import following errors can still be ignored, which is questionable, but this didn't change recently so I'm not addressing it here. Fixes #14042. 
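For a concrete sense of the `followed` flag introduced below, here is a small hedged sketch of how `BuildSource` objects end up distinguished (the call sites are illustrative, not mypy's actual code paths):

```python
from mypy.modulefinder import BuildSource

# Passed explicitly on the command line: treated as a root source, so its
# errors are always reported even if it sits under a sys.path directory.
root = BuildSource("src/app.py", "app", followed=False)

# Reached only by following imports: may still be silenced if it lives under
# site-packages or typeshed.
dep = BuildSource("src/util.py", "util", followed=True)

print(root)  # the repr now includes the followed flag
```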
--- mypy/build.py | 10 ++++-- mypy/dmypy_server.py | 14 ++++---- mypy/modulefinder.py | 8 +++-- mypy/server/update.py | 28 +++++++++++----- mypy/test/testcmdline.py | 8 ++--- test-data/unit/cmdline.test | 65 +++++++++++++++++++++++++++++++++++++ 6 files changed, 108 insertions(+), 25 deletions(-) diff --git a/mypy/build.py b/mypy/build.py index 31851680ea82..27dc1141ce28 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -1940,7 +1940,7 @@ def __init__( raise if follow_imports == "silent": self.ignore_all = True - elif path and is_silent_import_module(manager, path): + elif path and is_silent_import_module(manager, path) and not root_source: self.ignore_all = True self.path = path if path: @@ -2629,7 +2629,7 @@ def find_module_and_diagnose( else: skipping_module(manager, caller_line, caller_state, id, result) raise ModuleNotFound - if is_silent_import_module(manager, result): + if is_silent_import_module(manager, result) and not root_source: follow_imports = "silent" return (result, follow_imports) else: @@ -3024,7 +3024,11 @@ def load_graph( for bs in sources: try: st = State( - id=bs.module, path=bs.path, source=bs.text, manager=manager, root_source=True + id=bs.module, + path=bs.path, + source=bs.text, + manager=manager, + root_source=not bs.followed, ) except ModuleNotFound: continue diff --git a/mypy/dmypy_server.py b/mypy/dmypy_server.py index 671999065e7d..be2f4ab8d618 100644 --- a/mypy/dmypy_server.py +++ b/mypy/dmypy_server.py @@ -592,7 +592,7 @@ def fine_grained_increment_follow_imports(self, sources: list[BuildSource]) -> l sources.extend(new_files) # Process changes directly reachable from roots. - messages = fine_grained_manager.update(changed, []) + messages = fine_grained_manager.update(changed, [], followed=True) # Follow deps from changed modules (still within graph). 
worklist = changed[:] @@ -609,13 +609,13 @@ def fine_grained_increment_follow_imports(self, sources: list[BuildSource]) -> l sources2, graph, seen, changed_paths ) self.update_sources(new_files) - messages = fine_grained_manager.update(changed, []) + messages = fine_grained_manager.update(changed, [], followed=True) worklist.extend(changed) t2 = time.time() def refresh_file(module: str, path: str) -> list[str]: - return fine_grained_manager.update([(module, path)], []) + return fine_grained_manager.update([(module, path)], [], followed=True) for module_id, state in list(graph.items()): new_messages = refresh_suppressed_submodules( @@ -632,10 +632,10 @@ def refresh_file(module: str, path: str) -> list[str]: new_unsuppressed = self.find_added_suppressed(graph, seen, manager.search_paths) if not new_unsuppressed: break - new_files = [BuildSource(mod[1], mod[0]) for mod in new_unsuppressed] + new_files = [BuildSource(mod[1], mod[0], followed=True) for mod in new_unsuppressed] sources.extend(new_files) self.update_sources(new_files) - messages = fine_grained_manager.update(new_unsuppressed, []) + messages = fine_grained_manager.update(new_unsuppressed, [], followed=True) for module_id, path in new_unsuppressed: new_messages = refresh_suppressed_submodules( @@ -717,7 +717,7 @@ def find_reachable_changed_modules( for dep in state.dependencies: if dep not in seen: seen.add(dep) - worklist.append(BuildSource(graph[dep].path, graph[dep].id)) + worklist.append(BuildSource(graph[dep].path, graph[dep].id, followed=True)) return changed, new_files def direct_imports( @@ -725,7 +725,7 @@ def direct_imports( ) -> list[BuildSource]: """Return the direct imports of module not included in seen.""" state = graph[module[0]] - return [BuildSource(graph[dep].path, dep) for dep in state.dependencies] + return [BuildSource(graph[dep].path, dep, followed=True) for dep in state.dependencies] def find_added_suppressed( self, graph: mypy.build.Graph, seen: set[str], search_paths: SearchPaths diff --git a/mypy/modulefinder.py b/mypy/modulefinder.py index 5d542b154906..e64dba5ce29d 100644 --- a/mypy/modulefinder.py +++ b/mypy/modulefinder.py @@ -115,15 +115,19 @@ def __init__( module: str | None, text: str | None = None, base_dir: str | None = None, + followed: bool = False, ) -> None: self.path = path # File where it's found (e.g. 'xxx/yyy/foo/bar.py') self.module = module or "__main__" # Module name (e.g. 'foo.bar') self.text = text # Source code, if initially supplied, else None self.base_dir = base_dir # Directory where the package is rooted (e.g. 'xxx/yyy') + self.followed = followed # Was this found by following imports? 
def __repr__(self) -> str: - return "BuildSource(path={!r}, module={!r}, has_text={}, base_dir={!r})".format( - self.path, self.module, self.text is not None, self.base_dir + return ( + "BuildSource(path={!r}, module={!r}, has_text={}, base_dir={!r}, followed={})".format( + self.path, self.module, self.text is not None, self.base_dir, self.followed + ) ) diff --git a/mypy/server/update.py b/mypy/server/update.py index 686068a4aad0..a1f57b5a6746 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -203,7 +203,10 @@ def __init__(self, result: BuildResult) -> None: self.processed_targets: list[str] = [] def update( - self, changed_modules: list[tuple[str, str]], removed_modules: list[tuple[str, str]] + self, + changed_modules: list[tuple[str, str]], + removed_modules: list[tuple[str, str]], + followed: bool = False, ) -> list[str]: """Update previous build result by processing changed modules. @@ -219,6 +222,7 @@ def update( Assume this is correct; it's not validated here. removed_modules: Modules that have been deleted since the previous update or removed from the build. + followed: If True, the modules were found through following imports Returns: A list of errors. @@ -256,7 +260,9 @@ def update( self.blocking_error = None while True: - result = self.update_one(changed_modules, initial_set, removed_set, blocking_error) + result = self.update_one( + changed_modules, initial_set, removed_set, blocking_error, followed + ) changed_modules, (next_id, next_path), blocker_messages = result if blocker_messages is not None: @@ -329,6 +335,7 @@ def update_one( initial_set: set[str], removed_set: set[str], blocking_error: str | None, + followed: bool, ) -> tuple[list[tuple[str, str]], tuple[str, str], list[str] | None]: """Process a module from the list of changed modules. @@ -355,7 +362,7 @@ def update_one( ) return changed_modules, (next_id, next_path), None - result = self.update_module(next_id, next_path, next_id in removed_set) + result = self.update_module(next_id, next_path, next_id in removed_set, followed) remaining, (next_id, next_path), blocker_messages = result changed_modules = [(id, path) for id, path in changed_modules if id != next_id] changed_modules = dedupe_modules(remaining + changed_modules) @@ -368,7 +375,7 @@ def update_one( return changed_modules, (next_id, next_path), blocker_messages def update_module( - self, module: str, path: str, force_removed: bool + self, module: str, path: str, force_removed: bool, followed: bool ) -> tuple[list[tuple[str, str]], tuple[str, str], list[str] | None]: """Update a single modified module. @@ -380,6 +387,7 @@ def update_module( path: File system path of the module force_removed: If True, consider module removed from the build even if path exists (used for removing an existing file from the build) + followed: Was this found via import following? Returns: Tuple with these items: @@ -417,7 +425,7 @@ def update_module( manager.errors.reset() self.processed_targets.append(module) result = update_module_isolated( - module, path, manager, previous_modules, graph, force_removed + module, path, manager, previous_modules, graph, force_removed, followed ) if isinstance(result, BlockedUpdate): # Blocking error -- just give up @@ -552,6 +560,7 @@ def update_module_isolated( previous_modules: dict[str, str], graph: Graph, force_removed: bool, + followed: bool, ) -> UpdateResult: """Build a new version of one changed module only. 
@@ -575,7 +584,7 @@ def update_module_isolated( delete_module(module, path, graph, manager) return NormalUpdate(module, path, [], None) - sources = get_sources(manager.fscache, previous_modules, [(module, path)]) + sources = get_sources(manager.fscache, previous_modules, [(module, path)], followed) if module in manager.missing_modules: manager.missing_modules.remove(module) @@ -728,12 +737,15 @@ def get_module_to_path_map(graph: Graph) -> dict[str, str]: def get_sources( - fscache: FileSystemCache, modules: dict[str, str], changed_modules: list[tuple[str, str]] + fscache: FileSystemCache, + modules: dict[str, str], + changed_modules: list[tuple[str, str]], + followed: bool, ) -> list[BuildSource]: sources = [] for id, path in changed_modules: if fscache.isfile(path): - sources.append(BuildSource(path, id, None)) + sources.append(BuildSource(path, id, None, followed=followed)) return sources diff --git a/mypy/test/testcmdline.py b/mypy/test/testcmdline.py index 268b6bab1ec2..2e8b0dc9a1cd 100644 --- a/mypy/test/testcmdline.py +++ b/mypy/test/testcmdline.py @@ -69,12 +69,10 @@ def test_python_cmdline(testcase: DataDrivenTestCase, step: int) -> None: env["PYTHONPATH"] = PREFIX if os.path.isdir(extra_path): env["PYTHONPATH"] += os.pathsep + extra_path + cwd = os.path.join(test_temp_dir, custom_cwd or "") + args = [arg.replace("$CWD", os.path.abspath(cwd)) for arg in args] process = subprocess.Popen( - fixed + args, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - cwd=os.path.join(test_temp_dir, custom_cwd or ""), - env=env, + fixed + args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd, env=env ) outb, errb = process.communicate() result = process.returncode diff --git a/test-data/unit/cmdline.test b/test-data/unit/cmdline.test index 2ea7f07da3bc..92b0af6942bc 100644 --- a/test-data/unit/cmdline.test +++ b/test-data/unit/cmdline.test @@ -1505,3 +1505,68 @@ def f(): [out] a.py:2: note: By default the bodies of untyped functions are not checked, consider using --check-untyped-defs == Return code: 0 + +[case testCustomTypeshedDirFilePassedExplicitly] +# cmd: mypy --custom-typeshed-dir dir m.py dir/stdlib/foo.pyi +[file m.py] +1() +[file dir/stdlib/abc.pyi] +1() # Errors are not reported from typeshed by default +[file dir/stdlib/builtins.pyi] +class object: pass +class str(object): pass +class int(object): pass +[file dir/stdlib/sys.pyi] +[file dir/stdlib/types.pyi] +[file dir/stdlib/typing.pyi] +[file dir/stdlib/mypy_extensions.pyi] +[file dir/stdlib/typing_extensions.pyi] +[file dir/stdlib/foo.pyi] +1() # Errors are reported if the file was explicitly passed on the command line +[file dir/stdlib/VERSIONS] +[out] +dir/stdlib/foo.pyi:1: error: "int" not callable +m.py:1: error: "int" not callable + +[case testFileInPythonPathPassedExplicitly1] +# cmd: mypy $CWD/pypath/foo.py +[file pypath/foo.py] +1() +[out] +pypath/foo.py:1: error: "int" not callable + +[case testFileInPythonPathPassedExplicitly2] +# cmd: mypy pypath/foo.py +[file pypath/foo.py] +1() +[out] +pypath/foo.py:1: error: "int" not callable + +[case testFileInPythonPathPassedExplicitly3] +# cmd: mypy -p foo +# cwd: pypath +[file pypath/foo/__init__.py] +1() +[file pypath/foo/m.py] +1() +[out] +foo/m.py:1: error: "int" not callable +foo/__init__.py:1: error: "int" not callable + +[case testFileInPythonPathPassedExplicitly4] +# cmd: mypy -m foo +# cwd: pypath +[file pypath/foo.py] +1() +[out] +foo.py:1: error: "int" not callable + +[case testFileInPythonPathPassedExplicitly5] +# cmd: mypy -m foo.m +# cwd: pypath +[file 
pypath/foo/__init__.py] +1() # TODO: Maybe this should generate errors as well? But how would we decide? +[file pypath/foo/m.py] +1() +[out] +foo/m.py:1: error: "int" not callable From b18281c857a3e089740b6c6c03ff52a529c1b0ba Mon Sep 17 00:00:00 2001 From: Nick Drozd Date: Thu, 10 Nov 2022 14:20:38 -0600 Subject: [PATCH 12/16] Simplify boolean return logic in various places (#14012) Co-authored-by: Shantanu <12621235+hauntsaninja@users.noreply.github.com> --- misc/fix_annotate.py | 9 +++--- mypy/build.py | 20 ++++++-------- mypy/checker.py | 24 ++++++---------- mypy/modulefinder.py | 23 +++++++--------- mypy/stubgen.py | 5 +--- mypy/subtypes.py | 34 ++++++++--------------- mypy/typeanal.py | 4 +-- mypy/types.py | 47 ++++++++++++++++---------------- mypyc/ir/class_ir.py | 5 +--- mypyc/irbuild/prebuildvisitor.py | 10 +++---- mypyc/irbuild/prepare.py | 23 ++++++++++------ 11 files changed, 86 insertions(+), 118 deletions(-) diff --git a/misc/fix_annotate.py b/misc/fix_annotate.py index b661a899924c..7fffba8a8507 100644 --- a/misc/fix_annotate.py +++ b/misc/fix_annotate.py @@ -213,8 +213,7 @@ def has_return_exprs(self, node): results = {} if self.return_expr.match(node, results): return True - for child in node.children: - if child.type not in (syms.funcdef, syms.classdef): - if self.has_return_exprs(child): - return True - return False + return any( + child.type not in (syms.funcdef, syms.classdef) and self.has_return_exprs(child) + for child in node.children + ) diff --git a/mypy/build.py b/mypy/build.py index 27dc1141ce28..62367c35915e 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -2728,11 +2728,8 @@ def in_partial_package(id: str, manager: BuildManager) -> bool: else: parent_mod = parent_st.tree if parent_mod is not None: - if parent_mod.is_partial_stub_package: - return True - else: - # Bail out soon, complete subpackage found - return False + # Bail out soon, complete subpackage found + return parent_mod.is_partial_stub_package id = parent return False @@ -3580,9 +3577,10 @@ def record_missing_stub_packages(cache_dir: str, missing_stub_packages: set[str] def is_silent_import_module(manager: BuildManager, path: str) -> bool: - if not manager.options.no_silence_site_packages: - for dir in manager.search_paths.package_path + manager.search_paths.typeshed_path: - if is_sub_path(path, dir): - # Silence errors in site-package dirs and typeshed - return True - return False + if manager.options.no_silence_site_packages: + return False + # Silence errors in site-package dirs and typeshed + return any( + is_sub_path(path, dir) + for dir in manager.search_paths.package_path + manager.search_paths.typeshed_path + ) diff --git a/mypy/checker.py b/mypy/checker.py index 67d132afe2c7..fef85d085496 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -2292,9 +2292,7 @@ def is_final_enum_value(self, sym: SymbolTableNode) -> bool: ): return False - if self.is_stub or sym.node.has_explicit_value: - return True - return False + return self.is_stub or sym.node.has_explicit_value def check_enum_bases(self, defn: ClassDef) -> None: """ @@ -5978,10 +5976,7 @@ def store_type(self, node: Expression, typ: Type) -> None: self._type_maps[-1][node] = typ def has_type(self, node: Expression) -> bool: - for m in reversed(self._type_maps): - if node in m: - return True - return False + return any(node in m for m in reversed(self._type_maps)) def lookup_type_or_none(self, node: Expression) -> Type | None: for m in reversed(self._type_maps): @@ -6152,13 +6147,11 @@ def handle_partial_var_type( return 
fixup_partial_type(typ) def is_defined_in_base_class(self, var: Var) -> bool: - if var.info: - for base in var.info.mro[1:]: - if base.get(var.name) is not None: - return True - if var.info.fallback_to_any: - return True - return False + if not var.info: + return False + return var.info.fallback_to_any or any( + base.get(var.name) is not None for base in var.info.mro[1:] + ) def find_partial_types(self, var: Var) -> dict[Var, Context] | None: """Look for an active partial type scope containing variable. @@ -6354,8 +6347,7 @@ def is_writable_attribute(self, node: Node) -> bool: elif isinstance(node, OverloadedFuncDef) and node.is_property: first_item = cast(Decorator, node.items[0]) return first_item.var.is_settable_property - else: - return False + return False def get_isinstance_type(self, expr: Expression) -> list[TypeRange] | None: if isinstance(expr, OpExpr) and expr.op == "|": diff --git a/mypy/modulefinder.py b/mypy/modulefinder.py index e64dba5ce29d..b2abb4847705 100644 --- a/mypy/modulefinder.py +++ b/mypy/modulefinder.py @@ -148,14 +148,11 @@ def __init__(self, sources: list[BuildSource]) -> None: self.source_modules[source.module] = source.path or "" def is_source(self, file: MypyFile) -> bool: - if file.path and file.path in self.source_paths: - return True - elif file._fullname in self.source_modules: - return True - elif self.source_text_present: - return True - else: - return False + return ( + (file.path and file.path in self.source_paths) + or file._fullname in self.source_modules + or self.source_text_present + ) class FindModuleCache: @@ -573,11 +570,11 @@ def _is_compatible_stub_package(self, stub_dir: str) -> bool: whether the stubs are compatible with Python 2 and 3. """ metadata_fnam = os.path.join(stub_dir, "METADATA.toml") - if os.path.isfile(metadata_fnam): - with open(metadata_fnam, "rb") as f: - metadata = tomllib.load(f) - return bool(metadata.get("python3", True)) - return True + if not os.path.isfile(metadata_fnam): + return True + with open(metadata_fnam, "rb") as f: + metadata = tomllib.load(f) + return bool(metadata.get("python3", True)) def find_modules_recursive(self, module: str) -> list[BuildSource]: module_path = self.find_module(module) diff --git a/mypy/stubgen.py b/mypy/stubgen.py index fbae9ebaa252..8c7e24504270 100755 --- a/mypy/stubgen.py +++ b/mypy/stubgen.py @@ -1309,10 +1309,7 @@ def is_private_name(self, name: str, fullname: str | None = None) -> bool: def is_private_member(self, fullname: str) -> bool: parts = fullname.split(".") - for part in parts: - if self.is_private_name(part): - return True - return False + return any(self.is_private_name(part) for part in parts) def get_str_type_of_node( self, rvalue: Expression, can_infer_optional: bool = False, can_be_any: bool = True diff --git a/mypy/subtypes.py b/mypy/subtypes.py index 2724379ab878..9a4982f5b8ec 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -429,22 +429,18 @@ def visit_erased_type(self, left: ErasedType) -> bool: # This may be encountered during type inference. The result probably doesn't # matter much. # TODO: it actually does matter, figure out more principled logic about this. 
- if self.subtype_context.keep_erased_types: - return False - return True + return not self.subtype_context.keep_erased_types def visit_deleted_type(self, left: DeletedType) -> bool: return True def visit_instance(self, left: Instance) -> bool: if left.type.fallback_to_any and not self.proper_subtype: - if isinstance(self.right, NoneType): - # NOTE: `None` is a *non-subclassable* singleton, therefore no class - # can by a subtype of it, even with an `Any` fallback. - # This special case is needed to treat descriptors in classes with - # dynamic base classes correctly, see #5456. - return False - return True + # NOTE: `None` is a *non-subclassable* singleton, therefore no class + # can by a subtype of it, even with an `Any` fallback. + # This special case is needed to treat descriptors in classes with + # dynamic base classes correctly, see #5456. + return not isinstance(self.right, NoneType) right = self.right if isinstance(right, TupleType) and mypy.typeops.tuple_fallback(right).type.is_enum: return self._is_subtype(left, mypy.typeops.tuple_fallback(right)) @@ -513,11 +509,7 @@ def check_mixed( isinstance(unpacked_type, Instance) and unpacked_type.type.fullname == "builtins.tuple" ): - if not all( - is_equivalent(l, unpacked_type.args[0]) for l in compare_to - ): - return False - return True + return all(is_equivalent(l, unpacked_type.args[0]) for l in compare_to) if isinstance(unpacked_type, TypeVarTupleType): return False if isinstance(unpacked_type, AnyType): @@ -741,9 +733,8 @@ def visit_tuple_type(self, left: TupleType) -> bool: elif isinstance(right, TupleType): if len(left.items) != len(right.items): return False - for l, r in zip(left.items, right.items): - if not self._is_subtype(l, r): - return False + if any(not self._is_subtype(l, r) for l, r in zip(left.items, right.items)): + return False rfallback = mypy.typeops.tuple_fallback(right) if is_named_instance(rfallback, "builtins.tuple"): # No need to verify fallback. This is useful since the calculated fallback @@ -752,9 +743,7 @@ def visit_tuple_type(self, left: TupleType) -> bool: # join(Union[int, C], Union[str, C]) == Union[int, str, C]. return True lfallback = mypy.typeops.tuple_fallback(left) - if not self._is_subtype(lfallback, rfallback): - return False - return True + return self._is_subtype(lfallback, rfallback) else: return False @@ -1368,8 +1357,7 @@ def g(x: int) -> int: ... 
unified = unify_generic_callable(left, right, ignore_return=ignore_return) if unified is None: return False - else: - left = unified + left = unified # If we allow partial overlaps, we don't need to leave R generic: # if we can find even just a single typevar assignment which diff --git a/mypy/typeanal.py b/mypy/typeanal.py index 35f60f54605a..55d819071a3a 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -1729,9 +1729,7 @@ def __init__( def _seems_like_callable(self, type: UnboundType) -> bool: if not type.args: return False - if isinstance(type.args[0], (EllipsisType, TypeList, ParamSpecType)): - return True - return False + return isinstance(type.args[0], (EllipsisType, TypeList, ParamSpecType)) def visit_unbound_type(self, t: UnboundType) -> TypeVarLikeList: name = t.name diff --git a/mypy/types.py b/mypy/types.py index 2f0feb703f6a..a73c41904ea7 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -437,14 +437,12 @@ def __repr__(self) -> str: return self.raw_id.__repr__() def __eq__(self, other: object) -> bool: - if isinstance(other, TypeVarId): - return ( - self.raw_id == other.raw_id - and self.meta_level == other.meta_level - and self.namespace == other.namespace - ) - else: - return False + return ( + isinstance(other, TypeVarId) + and self.raw_id == other.raw_id + and self.meta_level == other.meta_level + and self.namespace == other.namespace + ) def __ne__(self, other: object) -> bool: return not (self == other) @@ -910,9 +908,7 @@ def __hash__(self) -> int: return hash(tuple(self.items)) def __eq__(self, other: object) -> bool: - if not isinstance(other, TypeList): - return False - return self.items == other.items + return isinstance(other, TypeList) and self.items == other.items class UnpackType(ProperType): @@ -2263,16 +2259,19 @@ def __hash__(self) -> int: return hash((frozenset(self.items.items()), self.fallback, frozenset(self.required_keys))) def __eq__(self, other: object) -> bool: - if isinstance(other, TypedDictType): - if frozenset(self.items.keys()) != frozenset(other.items.keys()): - return False - for (_, left_item_type, right_item_type) in self.zip(other): - if not left_item_type == right_item_type: - return False - return self.fallback == other.fallback and self.required_keys == other.required_keys - else: + if not isinstance(other, TypedDictType): return NotImplemented + return ( + frozenset(self.items.keys()) == frozenset(other.items.keys()) + and all( + left_item_type == right_item_type + for (_, left_item_type, right_item_type) in self.zip(other) + ) + and self.fallback == other.fallback + and self.required_keys == other.required_keys + ) + def serialize(self) -> JsonDict: return { ".class": "TypedDictType", @@ -3352,11 +3351,11 @@ def is_literal_type(typ: ProperType, fallback_fullname: str, value: LiteralValue """Check if this type is a LiteralType with the given fallback type and value.""" if isinstance(typ, Instance) and typ.last_known_value: typ = typ.last_known_value - if not isinstance(typ, LiteralType): - return False - if typ.fallback.type.fullname != fallback_fullname: - return False - return typ.value == value + return ( + isinstance(typ, LiteralType) + and typ.fallback.type.fullname == fallback_fullname + and typ.value == value + ) def is_self_type_like(typ: Type, *, is_classmethod: bool) -> bool: diff --git a/mypyc/ir/class_ir.py b/mypyc/ir/class_ir.py index 7f55decfd754..f0f772306e60 100644 --- a/mypyc/ir/class_ir.py +++ b/mypyc/ir/class_ir.py @@ -265,10 +265,7 @@ def has_attr(self, name: str) -> bool: return True def is_deletable(self, name: 
str) -> bool: - for ir in self.mro: - if name in ir.deletable: - return True - return False + return any(name in ir.deletable for ir in self.mro) def is_always_defined(self, name: str) -> bool: if self.is_deletable(name): diff --git a/mypyc/irbuild/prebuildvisitor.py b/mypyc/irbuild/prebuildvisitor.py index 7d52dc8da57c..d99453955002 100644 --- a/mypyc/irbuild/prebuildvisitor.py +++ b/mypyc/irbuild/prebuildvisitor.py @@ -162,12 +162,10 @@ def visit_symbol_node(self, symbol: SymbolNode) -> None: def is_parent(self, fitem: FuncItem, child: FuncItem) -> bool: # Check if child is nested within fdef (possibly indirectly # within multiple nested functions). - if child in self.nested_funcs: - parent = self.nested_funcs[child] - if parent == fitem: - return True - return self.is_parent(fitem, parent) - return False + if child not in self.nested_funcs: + return False + parent = self.nested_funcs[child] + return parent == fitem or self.is_parent(fitem, parent) def add_free_variable(self, symbol: SymbolNode) -> None: # Find the function where the symbol was (likely) first declared, diff --git a/mypyc/irbuild/prepare.py b/mypyc/irbuild/prepare.py index dc153ea11561..639d1a5ea0d1 100644 --- a/mypyc/irbuild/prepare.py +++ b/mypyc/irbuild/prepare.py @@ -178,15 +178,20 @@ def prepare_method_def( def is_valid_multipart_property_def(prop: OverloadedFuncDef) -> bool: # Checks to ensure supported property decorator semantics - if len(prop.items) == 2: - getter = prop.items[0] - setter = prop.items[1] - if isinstance(getter, Decorator) and isinstance(setter, Decorator): - if getter.func.is_property and len(setter.decorators) == 1: - if isinstance(setter.decorators[0], MemberExpr): - if setter.decorators[0].name == "setter": - return True - return False + if len(prop.items) != 2: + return False + + getter = prop.items[0] + setter = prop.items[1] + + return ( + isinstance(getter, Decorator) + and isinstance(setter, Decorator) + and getter.func.is_property + and len(setter.decorators) == 1 + and isinstance(setter.decorators[0], MemberExpr) + and setter.decorators[0].name == "setter" + ) def can_subclass_builtin(builtin_base: str) -> bool: From a48dd5ad7ff820da7ea1e947008fff2865e2296a Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Thu, 10 Nov 2022 15:37:07 -0800 Subject: [PATCH 13/16] Fix incompatible overrides of overloaded methods in concrete subclasses (#14017) Fixes #14002 --- mypy/checker.py | 17 ++++++ test-data/unit/check-selftype.test | 86 ++++++++++++++++++++++++++++++ 2 files changed, 103 insertions(+) diff --git a/mypy/checker.py b/mypy/checker.py index fef85d085496..2688a611b56a 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -1869,6 +1869,23 @@ def check_method_override_for_base_with_name( original_class_or_static = False # a variable can't be class or static if isinstance(original_type, FunctionLike): + active_self_type = self.scope.active_self_type() + if isinstance(original_type, Overloaded) and active_self_type: + # If we have an overload, filter to overloads that match the self type. + # This avoids false positives for concrete subclasses of generic classes, + # see testSelfTypeOverrideCompatibility for an example. + # It's possible we might want to do this as part of bind_and_map_method + filtered_items = [ + item + for item in original_type.items + if not item.arg_types or is_subtype(active_self_type, item.arg_types[0]) + ] + # If we don't have any filtered_items, maybe it's always a valid override + # of the superclass? 
However if you get to that point you're in murky type + # territory anyway, so we just preserve the type and have the behaviour match + # that of older versions of mypy. + if filtered_items: + original_type = Overloaded(filtered_items) original_type = self.bind_and_map_method(base_attr, original_type, defn.info, base) if original_node and is_property(original_node): original_type = get_property_type(original_type) diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test index bfb0eb5a4d89..3d801d23a642 100644 --- a/test-data/unit/check-selftype.test +++ b/test-data/unit/check-selftype.test @@ -156,6 +156,92 @@ class B2(A[T]): def f(self: A[bytes]) -> bytes: ... def f(self): ... +class C(A[int]): + def f(self) -> int: ... + +class D(A[str]): + def f(self) -> int: ... # E: Signature of "f" incompatible with supertype "A" \ + # N: Superclass: \ + # N: @overload \ + # N: def f(self) -> str \ + # N: Subclass: \ + # N: def f(self) -> int + +class E(A[T]): + def f(self) -> int: ... # E: Signature of "f" incompatible with supertype "A" \ + # N: Superclass: \ + # N: @overload \ + # N: def f(self) -> int \ + # N: @overload \ + # N: def f(self) -> str \ + # N: Subclass: \ + # N: def f(self) -> int + + +class F(A[bytes]): + # Note there's an argument to be made that this is actually compatible with the supertype + def f(self) -> bytes: ... # E: Signature of "f" incompatible with supertype "A" \ + # N: Superclass: \ + # N: @overload \ + # N: def f(self) -> int \ + # N: @overload \ + # N: def f(self) -> str \ + # N: Subclass: \ + # N: def f(self) -> bytes + +class G(A): + def f(self): ... + +class H(A[int]): + def f(self): ... + +class I(A[int]): + def f(*args): ... + +class J(A[int]): + def f(self, arg) -> int: ... # E: Signature of "f" incompatible with supertype "A" \ + # N: Superclass: \ + # N: @overload \ + # N: def f(self) -> int \ + # N: Subclass: \ + # N: def f(self, arg: Any) -> int + +[builtins fixtures/tuple.pyi] + +[case testSelfTypeOverrideCompatibilityTypeVar-xfail] +from typing import overload, TypeVar, Union + +AT = TypeVar("AT", bound="A") + +class A: + @overload + def f(self: AT, x: int) -> AT: ... + @overload + def f(self, x: str) -> None: ... + @overload + def f(self: AT) -> bytes: ... + def f(*a, **kw): ... + +class B(A): + @overload # E: Signature of "f" incompatible with supertype "A" \ + # N: Superclass: \ + # N: @overload \ + # N: def f(self, x: int) -> B \ + # N: @overload \ + # N: def f(self, x: str) -> None \ + # N: @overload \ + # N: def f(self) -> bytes \ + # N: Subclass: \ + # N: @overload \ + # N: def f(self, x: int) -> B \ + # N: @overload \ + # N: def f(self, x: str) -> None + def f(self, x: int) -> B: ... + @overload + def f(self, x: str) -> None: ... + def f(*a, **kw): ... +[builtins fixtures/dict.pyi] + [case testSelfTypeSuper] from typing import TypeVar, cast From 70e544b1fb848448fc702e95a84edfa5ab628d3c Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Fri, 11 Nov 2022 05:01:47 -0800 Subject: [PATCH 14/16] Update --no-warn-no-return docs for empty body changes (#14065) Fixes #14048 Co-authored-by: Jelle Zijlstra --- docs/source/command_line.rst | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/docs/source/command_line.rst b/docs/source/command_line.rst index 83d2983472be..31d23db204eb 100644 --- a/docs/source/command_line.rst +++ b/docs/source/command_line.rst @@ -448,9 +448,10 @@ potentially problematic or redundant in some way. 
are when: - The function has a ``None`` or ``Any`` return type - - The function has an empty body or a body that is just - ellipsis (``...``). Empty functions are often used for - abstract methods. + - The function has an empty body and is marked as an abstract method, + is in a protocol class, or is in a stub file + - The execution path can never return; for example, if an exception + is always raised Passing in :option:`--no-warn-no-return` will disable these error messages in all cases. From f78d1fdc154d507353b34e7ea2037ef68de4e6fc Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Fri, 11 Nov 2022 05:29:31 -0800 Subject: [PATCH 15/16] Fix another crash with report generation on namespace packages (#14063) Fixes #14046. Similar to #13733 Best reviewed with hide whitespace. --- mypy/report.py | 85 ++++++++++++++++++++++++-------------------------- 1 file changed, 41 insertions(+), 44 deletions(-) diff --git a/mypy/report.py b/mypy/report.py index 37b7497f1371..3fac2234c840 100644 --- a/mypy/report.py +++ b/mypy/report.py @@ -637,51 +637,48 @@ def on_file( etree.SubElement(class_element, "methods") lines_element = etree.SubElement(class_element, "lines") - with tokenize.open(path) as input_file: - class_lines_covered = 0 - class_total_lines = 0 - for lineno, _ in enumerate(input_file, 1): - status = visitor.line_map.get(lineno, stats.TYPE_EMPTY) - hits = 0 - branch = False - if status == stats.TYPE_EMPTY: - continue - class_total_lines += 1 - if status != stats.TYPE_ANY: - class_lines_covered += 1 - hits = 1 - if status == stats.TYPE_IMPRECISE: - branch = True - file_info.counts[status] += 1 - line_element = etree.SubElement( - lines_element, - "line", - branch=str(branch).lower(), - hits=str(hits), - number=str(lineno), - precision=stats.precision_names[status], - ) - if branch: - line_element.attrib["condition-coverage"] = "50% (1/2)" - class_element.attrib["branch-rate"] = "0" - class_element.attrib["line-rate"] = get_line_rate( - class_lines_covered, class_total_lines + class_lines_covered = 0 + class_total_lines = 0 + for lineno, _ in iterate_python_lines(path): + status = visitor.line_map.get(lineno, stats.TYPE_EMPTY) + hits = 0 + branch = False + if status == stats.TYPE_EMPTY: + continue + class_total_lines += 1 + if status != stats.TYPE_ANY: + class_lines_covered += 1 + hits = 1 + if status == stats.TYPE_IMPRECISE: + branch = True + file_info.counts[status] += 1 + line_element = etree.SubElement( + lines_element, + "line", + branch=str(branch).lower(), + hits=str(hits), + number=str(lineno), + precision=stats.precision_names[status], ) - # parent_module is set to whichever module contains this file. For most files, we want - # to simply strip the last element off of the module. But for __init__.py files, - # the module == the parent module. 
- parent_module = file_info.module.rsplit(".", 1)[0] - if file_info.name.endswith("__init__.py"): - parent_module = file_info.module - - if parent_module not in self.root_package.packages: - self.root_package.packages[parent_module] = CoberturaPackage(parent_module) - current_package = self.root_package.packages[parent_module] - packages_to_update = [self.root_package, current_package] - for package in packages_to_update: - package.total_lines += class_total_lines - package.covered_lines += class_lines_covered - current_package.classes[class_name] = class_element + if branch: + line_element.attrib["condition-coverage"] = "50% (1/2)" + class_element.attrib["branch-rate"] = "0" + class_element.attrib["line-rate"] = get_line_rate(class_lines_covered, class_total_lines) + # parent_module is set to whichever module contains this file. For most files, we want + # to simply strip the last element off of the module. But for __init__.py files, + # the module == the parent module. + parent_module = file_info.module.rsplit(".", 1)[0] + if file_info.name.endswith("__init__.py"): + parent_module = file_info.module + + if parent_module not in self.root_package.packages: + self.root_package.packages[parent_module] = CoberturaPackage(parent_module) + current_package = self.root_package.packages[parent_module] + packages_to_update = [self.root_package, current_package] + for package in packages_to_update: + package.total_lines += class_total_lines + package.covered_lines += class_lines_covered + current_package.classes[class_name] = class_element def on_finish(self) -> None: self.root.attrib["line-rate"] = get_line_rate( From 67855fab376c3c2d68bcf0940b242c9b71a98156 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Fri, 11 Nov 2022 12:08:29 -0800 Subject: [PATCH 16/16] Fix crash with function redefinition (#14064) Fixes #14027 (issue was surfaced by #13509) --- mypy/checker.py | 5 ++++- test-data/unit/check-functions.test | 14 ++++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/mypy/checker.py b/mypy/checker.py index 2688a611b56a..e727c343aa14 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -960,7 +960,10 @@ def _visit_func_def(self, defn: FuncDef) -> None: # Function definition overrides a variable initialized via assignment or a # decorated function. orig_type = defn.original_def.type - assert orig_type is not None, f"Error checking function redefinition {defn}" + if orig_type is None: + # If other branch is unreachable, we don't type check it and so we might + # not have a type for the original definition + return if isinstance(orig_type, PartialType): if orig_type.type is None: # Ah this is a partial type. Give it the type of the function. diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test index bb36b65f35de..ae6424f743be 100644 --- a/test-data/unit/check-functions.test +++ b/test-data/unit/check-functions.test @@ -1475,6 +1475,20 @@ else: @dec def f(): pass +[case testConditionalFunctionDefinitionUnreachable] +def bar() -> None: + if False: + foo = 1 + else: + def foo(obj): ... + +def baz() -> None: + if False: + foo: int = 1 + else: + def foo(obj): ... # E: Incompatible redefinition (redefinition with type "Callable[[Any], Any]", original type "int") +[builtins fixtures/tuple.pyi] + [case testConditionalRedefinitionOfAnUnconditionalFunctionDefinition1] from typing import Any def f(x: str) -> None: pass